Al-Alcoba-Inciarte committed on
Commit 420e6d4
1 Parent(s): 89a5390

Update app_backup.py

Files changed (1)
  1. app_backup.py +57 -0
app_backup.py CHANGED
@@ -0,0 +1,57 @@
+ import gradio as gr
+
+ from haystack.components.generators import HuggingFaceTGIGenerator
+
+ generator = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1")
+ generator.warm_up()
+
+ from haystack.components.fetchers.link_content import LinkContentFetcher
+ from haystack.components.converters import HTMLToDocument
+ from haystack.components.preprocessors import DocumentSplitter
+ from haystack.components.rankers import TransformersSimilarityRanker
+ from haystack.components.generators import GPTGenerator
+ from haystack.components.builders.prompt_builder import PromptBuilder
+ from haystack import Pipeline
+
+ fetcher = LinkContentFetcher()
+ converter = HTMLToDocument()
+ document_splitter = DocumentSplitter(split_by="word", split_length=50)
+ similarity_ranker = TransformersSimilarityRanker(top_k=3)
+
+ prompt_template = """
+ According to these documents:
+ {% for doc in documents %}
+ {{ doc.content }}
+ {% endfor %}
+ Answer the given question: {{question}}
+ Answer:
+ """
+ prompt_builder = PromptBuilder(template=prompt_template)
+
+ pipeline = Pipeline()
+ pipeline.add_component("fetcher", fetcher)
+ pipeline.add_component("converter", converter)
+ pipeline.add_component("splitter", document_splitter)
+ pipeline.add_component("ranker", similarity_ranker)
+ pipeline.add_component("prompt_builder", prompt_builder)
+ pipeline.add_component("llm", generator)
+
+ pipeline.connect("fetcher.streams", "converter.sources")
+ pipeline.connect("converter.documents", "splitter.documents")
+ pipeline.connect("splitter.documents", "ranker.documents")
+ pipeline.connect("ranker.documents", "prompt_builder.documents")
+ pipeline.connect("prompt_builder.prompt", "llm.prompt")
+
+ def respond(prompt, use_rag):
+     if use_rag:
+         result = pipeline.run({"prompt_builder": {"question": prompt},
+                                "ranker": {"query": prompt},
+                                "fetcher": {"urls": ["https://haystack.deepset.ai/blog/introducing-haystack-2-beta-and-advent"]},
+                                "llm": {"generation_kwargs": {"max_new_tokens": 350}}})
+         return result['llm']['replies'][0]
+     else:
+         result = generator.run(prompt, generation_kwargs={"max_new_tokens": 350})
+         return result["replies"][0]
+
+ iface = gr.Interface(fn=respond, inputs=["text", "checkbox"], outputs="text")
+ iface.launch()