inoid committed on
Commit 5a99733
1 Parent(s): 9c27a34

Update local changes

Files changed (2)
  1. app.py +7 -22
  2. seminar_edition_ai.py +26 -4
app.py CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 from llm_call import GeminiLLM
 from seminar_edition_ai import upload_file_ex, predictContemplando, predictProclamando, predictFromInit, \
-    downloadSermonFile, fileAddresToDownload, predictQuestionBuild, predictDevotionBuild, \
+    downloadSermonFile, fileAddresToDownload, predictQuestionBuild, predictDevotionBuild, predictArgumentQuestionBuild, \
     contemplandoQuestion, proclamandoQuestion, llm, embed_model
 
 HISTORY_ANSWER = ''
@@ -13,7 +13,11 @@ def activeSermonGuideZone(KEY):
 def showMessage(questionAnswer, KEY):
     if questionAnswer == None or questionAnswer == '' or len(questionAnswer) <= 7:
         raise gr.Error(f"You must write some answer or more longer {KEY}")
-    raise gr.Error(f" No implemented yet {KEY}!!!")
+    else:
+        try:
+            return predictArgumentQuestionBuild(questionAnswer)
+        except Exception as e:
+            raise gr.Error(f" Error on call AI {e}!!!")
 
 with gr.Blocks() as demo:
 
@@ -32,7 +36,7 @@ with gr.Blocks() as demo:
     )
 
     text_button.click(
-        fn=predictFromInit,
+        fn = predictFromInit,
         inputs=text_input,
         outputs=text_output
     )
@@ -42,25 +46,6 @@ with gr.Blocks() as demo:
         inputs=text_output
     )
     with gr.Tab("Obtener guía de la comunidad (Preguntas)"):
-        with gr.Accordion("Contemplando y Proclamando", open=False):
-            checkButton = gr.Checkbox(
-                value=False,
-                label="Mantener historial"
-            )
-        with gr.Row():
-            with gr.Tab("Contemplando"):
-                inbtwContemplando = gr.Button(f"Devocionalmente: {contemplandoQuestion['DEVOCIONALMENTE']}")
-                inbtwContemplandoOne = gr.Button(f"Exégesis: {contemplandoQuestion['EXÉGESIS']}")
-                inbtwContemplandoTwo = gr.Button(f"Cristo: {contemplandoQuestion['CRISTO']}")
-                inbtwContemplandoTree = gr.Button(f"Arco Redentor: {contemplandoQuestion['ARCO REDENTOR']}")
-                inbtwContemplandoFour = gr.Button(f"Evangelión: {contemplandoQuestion['EVANGELION']}")
-                inbtwContemplandoFourOne = gr.Button(f"Evangelión: {contemplandoQuestion['EVANGELION_TWO']}")
-
-            with gr.Tab("Proclamando"):
-                inbtwProclamando = gr.Button(f"Público: {proclamandoQuestion['PÚBLICO']}")
-                inbtwProclamandoOne = gr.Button(f"Historia: {proclamandoQuestion['HISTORIA']}")
-                inbtwProclamandoTwo = gr.Button(f"Expectativas: {proclamandoQuestion['EXPECTATIVAS']}")
-                inbtwProclamandoTwoTwo = gr.Button(f"Expectativas: {proclamandoQuestion['EXPECTATIVAS_TWO']}")
         with gr.Row():
             #Bibliografy about components
             # File (https://www.gradio.app/docs/gradio/file)
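Because showMessage now returns the model's text instead of always raising, it can be bound directly to a Gradio event and surface failures through gr.Error. Below is a minimal, self-contained sketch of that wiring; show_message_stub, the component names, and the KEY value are illustrative stand-ins, not code from this commit (in app.py the handler would be showMessage itself).

```python
import gradio as gr
from functools import partial


def show_message_stub(questionAnswer, KEY):
    """Stand-in with the same contract as app.py's showMessage: reject short
    answers via gr.Error, otherwise return the generated text."""
    if not questionAnswer or len(questionAnswer) <= 7:
        raise gr.Error(f"You must write some answer or more longer {KEY}")
    return f"[stub] generated follow-up for {KEY}: {questionAnswer}"


# Hypothetical wiring; in the real app the handler would be showMessage itself.
with gr.Blocks() as sketch:
    answer_input = gr.Textbox(label="Tu respuesta")
    answer_output = gr.Textbox(label="Texto generado")
    send_button = gr.Button("Enviar")

    # The value of answer_input is passed as questionAnswer; KEY is pre-bound.
    send_button.click(
        fn=partial(show_message_stub, KEY='DEVOCIONALMENTE'),
        inputs=answer_input,
        outputs=answer_output,
    )

if __name__ == "__main__":
    sketch.launch()
```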
seminar_edition_ai.py CHANGED
@@ -107,12 +107,12 @@ def predictFromInit(sermonTopic):
 
     if HISTORY_ANSWER == '':
         chain = updatePromptTemplate(
-            templates.getSermonPromptTemplates('BUILD_INIT'),
+            templates.getSermonPromptTemplates()['BUILD_INIT'],
             [keyStr,'CANT_VERSICULOS','context']
         )
     else:
         chain = updatePromptTemplate(
-            templates.getSermonPromptTemplates('BUILD_EMPTY'),
+            templates.getSermonPromptTemplates()['BUILD_EMPTY'],
             ['BIBLE_VERSICLE','context']
         )
         keyStr = 'BIBLE_VERSICLE'
@@ -152,7 +152,7 @@ def predictFromInit(sermonTopic):
 def predictQuestionBuild(sermonTopic):
     templates = SermonGeminiPromptTemplate()
     chain = updatePromptTemplate(
-        templates.getSermonPromptTemplates('BUILD_QUESTION'),
+        templates.getSermonPromptTemplates()['BUILD_QUESTION'],
         ['SERMON_IDEA', 'context']
     )
     global retriever
@@ -172,7 +172,7 @@ def predictQuestionBuild(sermonTopic):
 def predictDevotionBuild(sermonTopic):
     templates = SermonGeminiPromptTemplate()
     chain = updatePromptTemplate(
-        templates.getSermonPromptTemplate('BUILD_REFLECTIONS'),
+        templates.getSermonPromptTemplate()['BUILD_REFLECTIONS'],
         ['SERMON_IDEA', 'context']
     )
     global retriever
@@ -188,6 +188,28 @@ def predictDevotionBuild(sermonTopic):
     return answer
 
 
+####
+#
+####
+def predictArgumentQuestionBuild(questionAnswer):
+    templates = SermonGeminiPromptTemplate()
+    chain = updatePromptTemplate(
+        templates.getSermonPromptTemplate()['BUILD_ADD_INFORMATION_TO_QUEST_ANSWER'],
+        ['QUESTION_ANSWER', 'context']
+    )
+    global retriever
+    global HISTORY_ANSWER
+    answer = askQuestionEx(
+        "",
+        chain,
+        retriever,
+        topic = questionAnswer,
+        KEY = 'QUESTION_ANSWER'
+    )
+
+    return answer
+
+
 # A utility function for answer generation
 def askQuestion(
     question,
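Every seminar_edition_ai.py change here replaces a name-as-argument call such as getSermonPromptTemplates('BUILD_QUESTION') with a no-argument call followed by a dictionary lookup, getSermonPromptTemplates()['BUILD_QUESTION']. Below is a minimal sketch of that access pattern with a stand-in class and placeholder prompt text; the real SermonGeminiPromptTemplate contents are not part of this diff.

```python
# Stand-in for SermonGeminiPromptTemplate; the prompt strings are placeholders,
# not the project's real templates.
class SermonGeminiPromptTemplate:
    _TEMPLATES = {
        'BUILD_INIT': 'placeholder prompt using {CANT_VERSICULOS} and {context}',
        'BUILD_QUESTION': 'placeholder prompt using {SERMON_IDEA} and {context}',
        'BUILD_ADD_INFORMATION_TO_QUEST_ANSWER': 'placeholder prompt using {QUESTION_ANSWER} and {context}',
    }

    def getSermonPromptTemplates(self):
        # New style: return the whole mapping and let callers pick a template by key.
        return self._TEMPLATES


templates = SermonGeminiPromptTemplate()

# Old call style (removed by this commit): templates.getSermonPromptTemplates('BUILD_QUESTION')
# New call style (added by this commit): fetch the mapping, then index it by name.
prompt = templates.getSermonPromptTemplates()['BUILD_QUESTION']
print(prompt)
```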