Al-Alcoba-Inciarte committed
Commit 6daa349
Parent: 3eb7675

Update app.py

Files changed (1)
  1. app.py +48 -2
app.py CHANGED
@@ -5,8 +5,54 @@ from haystack.components.generators import HuggingFaceTGIGenerator
 generator = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1")
 generator.warm_up()
 
-def respond(prompt):
-    result = generator.run(prompt, generation_kwargs={"max_new_tokens": 350})
+from haystack.components.fetchers.link_content import LinkContentFetcher
+from haystack.components.converters import HTMLToDocument
+from haystack.components.preprocessors import DocumentSplitter
+from haystack.components.rankers import TransformersSimilarityRanker
+from haystack.components.generators import GPTGenerator
+from haystack.components.builders.prompt_builder import PromptBuilder
+from haystack import Pipeline
+
+fetcher = LinkContentFetcher()
+converter = HTMLToDocument()
+document_splitter = DocumentSplitter(split_by="word", split_length=50)
+similarity_ranker = TransformersSimilarityRanker(top_k=3)
+
+prompt_template = """
+According to these documents:
+
+{% for doc in documents %}
+  {{ doc.content }}
+{% endfor %}
+
+Answer the given question: {{question}}
+Answer:
+"""
+prompt_builder = PromptBuilder(template=prompt_template)
+
+pipeline = Pipeline()
+pipeline.add_component("fetcher", fetcher)
+pipeline.add_component("converter", converter)
+pipeline.add_component("splitter", document_splitter)
+pipeline.add_component("ranker", similarity_ranker)
+pipeline.add_component("prompt_builder", prompt_builder)
+pipeline.add_component("llm", generator)
+
+pipeline.connect("fetcher.streams", "converter.sources")
+pipeline.connect("converter.documents", "splitter.documents")
+pipeline.connect("splitter.documents", "ranker.documents")
+pipeline.connect("ranker.documents", "prompt_builder.documents")
+pipeline.connect("prompt_builder.prompt", "llm.prompt")
+
+def respond(prompt, use_rag):
+    if use_rag:
+        result = pipeline.run({"prompt_builder": {"question": question},
+                               "ranker": {"query": question},
+                               "fetcher": {"urls": ["https://haystack.deepset.ai/blog/introducing-haystack-2-beta-and-advent"]},
+                               "llm": {"generation_kwargs": {"max_new_tokens": 350}}})
+        return result['llm']['replies'][0]
+    else:
+        result = generator.run(prompt, generation_kwargs={"max_new_tokens": 350})
     return result["replies"][0]
 
 iface = gr.Interface(fn=respond, inputs="text", outputs="text")
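
Note: as committed, the RAG branch of respond references a variable named question that is never defined (the function parameter is prompt), and the Gradio interface still declares only a single text input, so use_rag is never supplied and the call would fail. The GPTGenerator import is also unused; the pipeline's "llm" component is the existing HuggingFaceTGIGenerator. A minimal sketch of a corrected wiring follows, assuming the user prompt doubles as both the ranker query and the prompt-builder question, and that the RAG toggle is exposed as a checkbox (both are my assumptions, not part of this commit):

def respond(prompt, use_rag):
    if use_rag:
        # Fetch the blog post, split it into 50-word chunks, keep the top-3
        # ranked chunks, and let the generator answer from them.
        result = pipeline.run({"prompt_builder": {"question": prompt},
                               "ranker": {"query": prompt},
                               "fetcher": {"urls": ["https://haystack.deepset.ai/blog/introducing-haystack-2-beta-and-advent"]},
                               "llm": {"generation_kwargs": {"max_new_tokens": 350}}})
        return result["llm"]["replies"][0]
    # Plain generation without retrieved context.
    result = generator.run(prompt, generation_kwargs={"max_new_tokens": 350})
    return result["replies"][0]

# A checkbox input lets Gradio pass both arguments to respond.
iface = gr.Interface(fn=respond, inputs=["text", "checkbox"], outputs="text")

With this wiring the checkbox simply switches between the bare Mixtral call and the pipeline run; everything else in the commit stays unchanged.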