Hristo ZHANG 张鹤立 committed on
Commit
0284d5c
1 Parent(s): 1315b88

add app.py

Browse files
Files changed (2) hide show
  1. app.py +25 -0
  2. requirements.txt +2 -0
app.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from llama_cpp import Llama
3
+ import os
4
+ model_file = "Yi-6B.q4_k_m.gguf"
5
+ if not os.path.isfile("Yi-6B.q4_k_m.gguf"):
6
+ os.system("wget -c https://huggingface.co/SamPurkis/Yi-6B-GGUF/resolve/main/Yi-6B.q4_k_m.gguf")
7
+
8
+ llm = Llama(model_path=model_file)
9
+
10
+ def generate_text(input_text):
11
+ output = llm(f"Human: {input_text} A:", max_tokens=512, stop=["Assistant:", "\n"], echo=True)
12
+ return output['choices'][0]['text']
13
+
14
+ input_text = gr.inputs.Textbox(lines= 10, label="Enter your input text")
15
+ output_text = gr.outputs.Textbox(label="Output text")
16
+
17
+ description = "llama.cpp implementation in python [https://github.com/abetlen/llama-cpp-python]"
18
+
19
+ examples = [
20
+ ["What is the capital of France? ", "The capital of France is Paris."],
21
+ ["Who wrote the novel 'Pride and Prejudice'?", "The novel 'Pride and Prejudice' was written by Jane Austen."],
22
+ ["What is the square root of 64?", "The square root of 64 is 8."]
23
+ ]
24
+
25
+ gr.Interface(fn=generate_text, inputs=input_text, outputs=output_text, title="Llama Language Model", description=description, examples=examples).launch()
requirements.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ llama-cpp-python
2
+ gradio