giannantonio15 committed
Commit
a15fbb1
1 Parent(s): b8ae371

Update app.py

Files changed (1)
  1. app.py +516 -4
app.py CHANGED
@@ -1,7 +1,519 @@
  import gradio as gr
 
- def greet(name):
-     return "Hello " + name + "!!"
 
- demo = gr.Interface(fn=greet, inputs="text", outputs="text")
- demo.launch()
  import gradio as gr
+ from llama_index.core import VectorStoreIndex, StorageContext, Settings
+ from llama_index.core.memory import ChatMemoryBuffer
+ import re
+ from llama_index.core import get_response_synthesizer
+ from llama_index.core.query_engine import RetrieverQueryEngine
+ # Retrievers
+ from llama_index.core.retrievers import (
+     VectorIndexRetriever,
+ )
+ from llama_index.core.chat_engine import ContextChatEngine
+ from pinecone import Pinecone
+ from llama_index.vector_stores.pinecone import PineconeVectorStore
+ import time
+ from utils import *
+ import spaces
+ import threading
+ import sys
+ import torch
 
+ head = """
+ <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-QWTKZyjpPEjISv5WaRU9OFeRpok6YctnYmDr5pNlyT2bRjXh0JMhjY6hW+ALEwIH" crossorigin="anonymous">
+
+ <script>
+ // JavaScript function to toggle text visibility
+ function toggleText(id){
+     console.log(id)
+     if(id=="span1"){
+         nodo_id = "nodo1"
+     }else if(id=="span2"){
+         nodo_id = "nodo2"
+     }else if(id=="span3"){
+         nodo_id = "nodo3"
+     }else{
+         nodo_id = "nodo4"
+     }
+     var text = document.getElementById(nodo_id);
+     if (text.style.display === "none") {
+         text.style.display = "block";
+     } else {
+         text.style.display = "none";
+     }
+ }
+
+ </script>
+
+ """
+
+ css = """
+ #chatbot {
+     margin-top: 1%;
+     width: 75%;
+     position: relative;
+     height: 70%;
+ }
+
+ #textBox{
+     width: 75%;
+     position: relative;
+ }
+
+ .wrapper.svelte-nab2ao p{
+     font-size: 14px;
+ }
+
+ #btnClear{
+     width: 75%;
+ }
+
+ #buttonChat{
+     width: 50%;
+     position: relative;
+ }
+
+ #colonnaElementi{
+     position: absolute;
+     left: 77%;
+     top: 10%;
+     bottom: 10%; /* Adjust this value as necessary */
+     width: 10%;
+     height: auto; /* Let the height be determined by the top and bottom properties */
+     max-height: 80%; /* Ensure it does not exceed 80% of the parent container's height */
+     overflow-y: auto; /* Allow scrolling if content overflows vertically */
+     overflow-x: hidden; /* Hide horizontal overflow */
+     word-wrap: break-word; /* Ensure words break to fit within the width */
+     box-sizing: border-box; /* Include padding and border in the element's total width and height */
+ }
+
+ #responseMode{
+     width: 5%;
+ }
+
+ .message.user.svelte-gutj6d.message-bubble-border{
+     padding: 5px;
+ }
+ .message.bot.svelte-gutj6d.message-bubble-border{
+     padding: 5px;
+ }
+ .icon {
+     cursor: pointer;
+ }
+ /* Style for the hidden text */
+ .hidden-text {
+     display: none;
+ }
+
+ .wrap svelte-1sk0pyu{
+     width: 12%
+ }
+
+ """
+
+ user_message = ""
+ current_chat_mode = ""
+ current_response_mode = "compact"
+ current_collection = ""
+ file_path = ""
+ num_responses = 0
+ retriever = None
+ token_count_bandi = 0
+ token_count_bandi_sistema_puglia = 0
+ chat_engine_bandi = None
+ chat_engine_bandi_sistema_puglia = None
+ memory_bandi = None
+ memory_bandi_sistema_puglia = None
+ stream_response = None
+ divDocumenti = None
+
+ def main():
+     setGPU()
+     llm = setLLM()
+     Settings.llm = llm
+     Settings.embed_model = "local:google-bert/bert-base-multilingual-cased"
+     embed_model = Settings.embed_model
+     text_qa_template, refine_template = setPromptTemplate()
+
+     def select_initial_collection():
+         global current_collection
+         global retriever
+
+         pc = Pinecone(api_key="7e412663-a2dc-44a6-ab57-25dd0bdce226")
+         # connect to index
+         pinecone_index = pc.Index("indexbandisistemapuglia")
+
+         vector_store = PineconeVectorStore(
+             pinecone_index=pinecone_index,
+             add_sparse_vector=True,
+         )
+
+         storage_context = StorageContext.from_defaults(vector_store=vector_store)
+
+         index = VectorStoreIndex.from_vector_store(
+             vector_store, storage_context=storage_context
+         )
+
+         retriever = VectorIndexRetriever(index=index, similarity_top_k=3, vector_store_query_mode="hybrid", embed_model=embed_model, alpha=0.5)
+
+         current_collection = "BANDI_SISTEMA_PUGLIA"
+         return "collezione settata"
+
+     def select_collection(evt: gr.SelectData):
+         global current_collection
+         global retriever
+         global chat_engine_bandi
+         global chat_engine_bandi_sistema_puglia
+         global token_count_bandi
+         global token_count_bandi_sistema_puglia
+         global memory_bandi
+         global memory_bandi_sistema_puglia
+         selected_collection = evt.value
+
+         if(selected_collection != current_collection):
+             if(selected_collection == "BANDI_SISTEMA_PUGLIA"):
+                 chat_engine_bandi.reset()
+                 chat_engine_bandi_sistema_puglia.reset()
+                 memory_bandi_sistema_puglia.reset()
+                 memory_bandi.reset()
+                 token_count_bandi = 0
+                 token_count_bandi_sistema_puglia = 0
+                 pc = Pinecone(api_key="7e412663-a2dc-44a6-ab57-25dd0bdce226")
+                 # connect to index
+                 pinecone_index = pc.Index("indexbandisistemapuglia")
+
+                 vector_store = PineconeVectorStore(
+                     pinecone_index=pinecone_index,
+                     add_sparse_vector=True,
+                 )
+
+                 storage_context = StorageContext.from_defaults(vector_store=vector_store)
+
+                 # load your index from stored vectors
+                 index = VectorStoreIndex.from_vector_store(
+                     vector_store, storage_context=storage_context
+                 )
+
+                 retriever = VectorIndexRetriever(index=index, similarity_top_k=3, vector_store_query_mode="hybrid", embed_model=embed_model, alpha=0.5)
+             else:
+                 chat_engine_bandi.reset()
+                 chat_engine_bandi_sistema_puglia.reset()
+                 memory_bandi_sistema_puglia.reset()
+                 memory_bandi.reset()
+                 token_count_bandi = 0
+                 token_count_bandi_sistema_puglia = 0
+                 pc = Pinecone(api_key="7e412663-a2dc-44a6-ab57-25dd0bdce226")
+                 # connect to index
+                 pinecone_index = pc.Index("indexbandi")
+
+                 vector_store = PineconeVectorStore(
+                     pinecone_index=pinecone_index,
+                     add_sparse_vector=True,
+                 )
+
+                 storage_context = StorageContext.from_defaults(vector_store=vector_store)
+
+                 # load your index from stored vectors
+                 index = VectorStoreIndex.from_vector_store(
+                     vector_store, storage_context=storage_context
+                 )
+
+                 retriever = VectorIndexRetriever(index=index, similarity_top_k=3, vector_store_query_mode="hybrid", embed_model=embed_model, alpha=0.4)
+
+             current_collection = selected_collection
+
+         return "<div class='alert alert-success' role='alert'> Collezione "+selected_collection+" selezionata </div>"
+
+     def select_response_mode(evt: gr.SelectData):
+         global current_response_mode
+         current_response_mode = evt.value
+         return "<div class='alert alert-success' role='alert'>"+current_response_mode+" selezionato </div>"
+
+     def select_chat_mode():
+         global current_chat_mode
+         global memory_bandi
+         global memory_bandi_sistema_puglia
+         global chat_engine_bandi
+         global chat_engine_bandi_sistema_puglia
+         global token_count_bandi
+         global token_count_bandi_sistema_puglia
+         memory_bandi_sistema_puglia.reset()
+         memory_bandi.reset()
+         chat_engine_bandi.reset()
+         chat_engine_bandi_sistema_puglia.reset()
+         token_count_bandi = 0
+         token_count_bandi_sistema_puglia = 0
+         current_chat_mode = "CHAT"
+
+         return "<div class='alert alert-success' role='alert'>Hai selezionato la modalità "+current_chat_mode+" </div>"
+
+     def select_standard_mode():
+         global current_chat_mode
+         current_chat_mode = "STANDARD"
+         return "<div class='alert alert-success' role='alert'>Hai selezionato la modalità "+current_chat_mode+" </div>"
+
+     def set_chat_engine():
+         global chat_engine_bandi
+         global chat_engine_bandi_sistema_puglia
+         global memory_bandi
+         global memory_bandi_sistema_puglia
+         global token_count_bandi_sistema_puglia
+         global token_count_bandi
+         memory_bandi = ChatMemoryBuffer.from_defaults(token_limit=5000)
+         memory_bandi_sistema_puglia = ChatMemoryBuffer.from_defaults(token_limit=3000)
+
+         pc = Pinecone(api_key="7e412663-a2dc-44a6-ab57-25dd0bdce226")
+         pinecone_index = pc.Index("indexbandi")
+         vector_store = PineconeVectorStore(
+             pinecone_index=pinecone_index,
+             add_sparse_vector=True,
+         )
+         storage_context = StorageContext.from_defaults(vector_store=vector_store)
+         index = VectorStoreIndex.from_vector_store(
+             vector_store, storage_context=storage_context
+         )
+
+         retriever_bandi = VectorIndexRetriever(index=index, similarity_top_k=3, vector_store_query_mode="hybrid", embed_model=embed_model, alpha=0.5)
+         chat_engine_bandi = ContextChatEngine(retriever=retriever_bandi,
+             context_template="Sei un chatbot in grado di rispondere alle domande su bandi regionali e avvisi della regione Puglia. Hai accesso ai bandi della regione Puglia. Qui sotto le informazioni di contesto recuperate. \n"
+             "---------------------\n"
+             "Informazioni di contesto: "+"{context_str}\n"
+             "---------------------\n"
+             "Usa le informazioni di contesto sopra fornite e non la tua conoscenza pregressa per rispondere, l'unica regione che conosci è la regione Puglia. "
+             "rispondi sempre alla seguente query sul bando regionale della Puglia usando le informazioni di contesto."
+             "\n", llm=llm, memory=memory_bandi, prefix_messages=[])
+
+         pinecone_index = pc.Index("indexbandisistemapuglia")
+         vector_store = PineconeVectorStore(
+             pinecone_index=pinecone_index,
+             add_sparse_vector=True,
+         )
+         storage_context = StorageContext.from_defaults(vector_store=vector_store)
+         index = VectorStoreIndex.from_vector_store(
+             vector_store, storage_context=storage_context
+         )
+         retriever_bandi_sistema_puglia = VectorIndexRetriever(index=index, similarity_top_k=3, vector_store_query_mode="hybrid", embed_model=embed_model, alpha=0.5)
+
+         chat_engine_bandi_sistema_puglia = ContextChatEngine(retriever=retriever_bandi_sistema_puglia,
+             context_template="Sei un chatbot in grado di rispondere alle domande su bandi regionali e avvisi della regione Puglia. Hai accesso ai bandi della regione Puglia. Qui sotto le informazioni di contesto recuperate. \n"
+             "---------------------\n"
+             "Informazioni di contesto: "+"{context_str}\n"
+             "---------------------\n"
+             "Usa le informazioni di contesto sopra fornite e non la tua conoscenza pregressa per rispondere, l'unica regione che conosci è la regione Puglia. "
+             "rispondi sempre alla seguente query sul bando regionale della Puglia usando le informazioni di contesto."
+             "\n", llm=llm, memory=memory_bandi_sistema_puglia, prefix_messages=[])
+
+     def html_escape(text):
+         html_entities = {
+             'à': '&agrave;',
+             'è': '&egrave;',
+             'é': '&eacute;',
+             'ì': '&igrave;',
+             'ò': '&ograve;',
+             'ù': '&ugrave;',
+             'À': '&Agrave;',
+             'È': '&Egrave;',
+             'É': '&Eacute;',
+             'Ì': '&Igrave;',
+             'Ò': '&Ograve;',
+             'Ù': '&Ugrave;',
+             'ç': '&ccedil;',
+             'Ç': '&Ccedil;',
+             'ä': '&auml;',
+             'ö': '&ouml;',
+             'ü': '&uuml;',
+             'Ä': '&Auml;',
+             'Ö': '&Ouml;',
+             'Ü': '&Uuml;',
+             'ß': '&szlig;',
+             'ñ': '&ntilde;',
+             'Ñ': '&Ntilde;',
+             'œ': '&oelig;',
+             'Œ': '&OElig;',
+             'æ': '&aelig;',
+             'Æ': '&AElig;',
+             'ø': '&oslash;',
+             'Ø': '&Oslash;',
+             'å': '&aring;',
+             'Å': '&Aring;',
+             '&': '&amp;',
+             '<': '&lt;',
+             '>': '&gt;',
+             '"': '&quot;',
+             "'": '&#39;'
+         }
+         return ''.join(html_entities.get(c, c) for c in text)
+
+     def reset():
+         global chat_engine_bandi
+         global chat_engine_bandi_sistema_puglia
+         global memory_bandi
+         global memory_bandi_sistema_puglia
+         global token_count_bandi
+         global token_count_bandi_sistema_puglia
+         chat_engine_bandi.reset()
+         chat_engine_bandi_sistema_puglia.reset()
+         memory_bandi_sistema_puglia.reset()
+         memory_bandi.reset()
+         token_count_bandi = 0
+         token_count_bandi_sistema_puglia = 0
+         return "<div class='alert alert-success' role='alert'>Cronologia chat eliminata</div>"
+
+     select_initial_collection()
+     set_chat_engine()
+
+     with gr.Blocks(css=css, head=head) as demo:
+
+         with gr.Row():
+             output = gr.HTML()
+         with gr.Row(elem_id="buttonChat"):
+             gr.Button("STANDARD", size="sm").click(fn=select_standard_mode, outputs=output)
+             gr.Button("CHAT", size="sm").click(fn=select_chat_mode, outputs=output)
+
+         chatbot = gr.Chatbot(elem_id="chatbot", container=False)
+
+         with gr.Column(elem_id="colonnaElementi"):
+             gr.Dropdown(
+                 ["BANDI_SISTEMA_PUGLIA", "BANDI"], label="Collezione di documenti", info="", container=False, interactive=True, value="BANDI_SISTEMA_PUGLIA", elem_id="dropdown"
+             ).select(fn=select_collection, outputs=output)
+
+             gr.Radio(["compact", "tree_summarize"], label="Response mode", info="Influenzerà il modo in cui il chatbot risponde", interactive=True, container=False, value="compact", elem_id="responseMode").select(fn=select_response_mode, outputs=output)
+             divDocumenti = gr.HTML("<div id='divDocumenti'></div>")
+         msg = gr.Textbox(elem_id="textBox", container=False)
+         clear = gr.ClearButton([msg, chatbot], elem_id="btnClear")
+         clear.click(fn=reset, outputs=output)
+
+         def user(userMessage, history):
+             global user_message
+             user_message = userMessage
+             if history is None:
+                 history = []
+
+             return "", history + [[user_message, None]]
+
+         def getStreamResponse(type_engine, engine, message):
+             response = None
+             if(type_engine=="CHAT"):
+                 response = engine.stream_chat(message)
+             else:
+                 response = engine.query(message)
+             return response
+
+         @spaces.GPU(duration=120)
+         def bot(history):
+             global chat_engine_bandi
+             global chat_engine_bandi_sistema_puglia
+             global memory_bandi
+             global memory_bandi_sistema_puglia
+             global current_response_mode
+             global current_collection
+             global retriever
+             global file_path
+             global current_chat_mode
+             global token_count_bandi
+             global token_count_bandi_sistema_puglia
+             global user_message
+
+             if(current_chat_mode=="CHAT"):
+                 if(current_collection=="BANDI"):
+                     if(token_count_bandi >= 1000):
+                         print("RESET!!!")
+                         token_count_bandi = 0
+                         memory_bandi.reset()
+                         chat_engine_bandi.reset()
+                     print(chat_engine_bandi.chat_history)
+                     print(memory_bandi)
+                     stream_response = chat_engine_bandi.stream_chat(user_message)
+                     history[-1][1] = ""
+                     for character in stream_response.response_gen:
+                         tokens = character.split(" ")
+                         num_tokens = len(tokens)
+                         token_count_bandi = token_count_bandi + num_tokens
+                         print(token_count_bandi)
+                         history[-1][1] += html_escape(str(character))
+                         time.sleep(0.05)
+                         yield history, "<p>"
+
+                     responseHTML = ""
+                     for i, node in enumerate(stream_response.source_nodes):
+                         responseHTML += "<p><b>"+node.metadata['nome_bando']+"</b><a href='"+node.metadata['file_path']+"' download> <svg xmlns='http://www.w3.org/2000/svg' width='16' height='16' fill='currentColor' class='bi bi-download' viewBox='0 0 16 16'><path d='M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5'/><path d='M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708z'/> </svg></a><br>Nodo <span id='span"+str(i+1)+"' class='icon' onclick='toggleText(this.id)'>🔍</span> <!-- Text to show/hide --><p class='hidden-text' id='nodo"+str(i+1)+"'>"+node.text+"</p>"
+                     yield history, responseHTML
+
+                 else:
+                     if(token_count_bandi_sistema_puglia >= 1000):
+                         print("RESET!!!")
+                         token_count_bandi_sistema_puglia = 0
+                         memory_bandi_sistema_puglia.reset()
+                         chat_engine_bandi_sistema_puglia.reset()
+                     print(chat_engine_bandi_sistema_puglia.chat_history)
+                     print(memory_bandi_sistema_puglia)
+                     stream_response = chat_engine_bandi_sistema_puglia.stream_chat(user_message)
+
+                     history[-1][1] = ""
+                     for character in stream_response.response_gen:
+                         tokens = character.split(" ")
+                         num_tokens = len(tokens)
+                         token_count_bandi_sistema_puglia = token_count_bandi_sistema_puglia + num_tokens
+                         print(token_count_bandi_sistema_puglia)
+                         history[-1][1] += html_escape(str(character))
+                         time.sleep(0.05)
+                         yield history, "<p>"
+
+                     responseHTML = ""
+                     for i, node in enumerate(stream_response.source_nodes):
+                         responseHTML += "<p><b>"+node.metadata['nome_bando']+"</b><a href='"+node.metadata['file_path']+"' download> <svg xmlns='http://www.w3.org/2000/svg' width='16' height='16' fill='currentColor' class='bi bi-download' viewBox='0 0 16 16'><path d='M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5'/><path d='M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708z'/> </svg></a><br>Nodo <span id='span"+str(i+1)+"' class='icon' onclick='toggleText(this.id)'>🔍</span> <!-- Text to show/hide --><p class='hidden-text' id='nodo"+str(i+1)+"'>"+node.text+"</p>"
+                     yield history, responseHTML
+
+             else:
+                 if(str(current_response_mode)=="tree_summarize"):
+                     # define response synthesizer
+                     response_synthesizer = get_response_synthesizer(streaming=True, response_mode="tree_summarize", text_qa_template=text_qa_template)
+                     query_engine = RetrieverQueryEngine(retriever=retriever, response_synthesizer=response_synthesizer)
+                     stream_response = query_engine.query(user_message)
+                     history[-1][1] = ""
+                     for character in stream_response.response_gen:
+                         history[-1][1] += html_escape(str(character))
+                         time.sleep(0.05)
+                         yield history, "<p>"
+
+                     responseHTML = ""
+                     for i, node in enumerate(stream_response.source_nodes):
+                         responseHTML += "<p><b>"+node.metadata['nome_bando']+"</b><a href='"+node.metadata['file_path']+"' download> <svg xmlns='http://www.w3.org/2000/svg' width='16' height='16' fill='currentColor' class='bi bi-download' viewBox='0 0 16 16'><path d='M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5'/><path d='M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708z'/> </svg></a><br>Nodo <span id='span"+str(i+1)+"' class='icon' onclick='toggleText(this.id)'>🔍</span> <!-- Text to show/hide --><p class='hidden-text' id='nodo"+str(i+1)+"'>"+node.text+"</p>"
+
+                     yield history, responseHTML
+                 else:
+                     # define response synthesizer
+                     response_synthesizer = get_response_synthesizer(streaming=True, response_mode="compact", text_qa_template=text_qa_template, refine_template=refine_template)
+                     query_engine = RetrieverQueryEngine(retriever=retriever, response_synthesizer=response_synthesizer)
+                     stream_response = query_engine.query(user_message)
+
+                     history[-1][1] = ""
+                     for character in stream_response.response_gen:
+                         history[-1][1] += html_escape(str(character))
+                         time.sleep(0.05)
+                         yield history, "<p>"
+
+                     responseHTML = ""
+                     for i, node in enumerate(stream_response.source_nodes):
+                         responseHTML += "<p><b>"+node.metadata['nome_bando']+"</b><a href='"+node.metadata['file_path']+"' download> <svg xmlns='http://www.w3.org/2000/svg' width='16' height='16' fill='currentColor' class='bi bi-download' viewBox='0 0 16 16'><path d='M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5'/><path d='M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708z'/> </svg></a><br>Nodo <span id='span"+str(i+1)+"' class='icon' onclick='toggleText(this.id)'>🔍</span> <!-- Text to show/hide --><p class='hidden-text' id='nodo"+str(i+1)+"'>"+node.text+"</p>"
+
+                     yield history, responseHTML
+
+             torch.cuda.empty_cache()
+             torch.cuda.reset_max_memory_allocated()
+             torch.cuda.reset_max_memory_cached()
+
+         msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
+             bot, chatbot, [chatbot, divDocumenti]
+         )
+
+     demo.queue()
+     demo.launch(debug=True, share=True)
+
+ if __name__ == "__main__":
+     main()
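
Note: the new app.py also relies on a utils module (setGPU, setLLM, setPromptTemplate, pulled in via from utils import *) that is not part of this commit, and it assumes gradio, torch, spaces, the pinecone client and llama-index core plus its Pinecone vector-store and local HuggingFace integrations are installed in the Space. A minimal sketch of what utils.py might provide is shown below; the HuggingFaceLLM backend, the placeholder model id and the prompt wording are illustrative assumptions, not the Space's actual helpers.

# utils.py -- illustrative sketch only; the real helpers shipped with this Space may differ.
import torch
from llama_index.core import PromptTemplate
from llama_index.llms.huggingface import HuggingFaceLLM  # assumed LLM backend


def setGPU():
    # Report the device; on ZeroGPU Spaces the GPU is attached inside @spaces.GPU calls.
    print("CUDA available:", torch.cuda.is_available())


def setLLM():
    # Assumption: a local Hugging Face model wrapped for llama_index.
    return HuggingFaceLLM(
        model_name="meta-llama/Meta-Llama-3-8B-Instruct",      # placeholder model id
        tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",  # placeholder tokenizer id
        max_new_tokens=512,
        generate_kwargs={"temperature": 0.1, "do_sample": True},
        device_map="auto",
    )


def setPromptTemplate():
    # Assumption: QA and refine templates for the query engines
    # (the Space presumably words these in Italian, like the context_template above).
    text_qa_template = PromptTemplate(
        "Context information:\n{context_str}\n"
        "Answer the question using only the context above.\n"
        "Question: {query_str}\nAnswer: "
    )
    refine_template = PromptTemplate(
        "Question: {query_str}\n"
        "Existing answer: {existing_answer}\n"
        "Additional context:\n{context_msg}\n"
        "Refine the existing answer if the new context helps, otherwise repeat it.\n"
    )
    return text_qa_template, refine_template

With helpers shaped like these, Settings.llm and the text_qa_template / refine_template passed to get_response_synthesizer in main() resolve as the code above expects.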