Update app.py
app.py CHANGED
@@ -1,35 +1,35 @@
-from huggingface_hub import InferenceClient
+# from huggingface_hub import InferenceClient

-model_name = "Qwen/Qwen2.5-72B-Instruct"
+# model_name = "Qwen/Qwen2.5-72B-Instruct"

-client = InferenceClient(model_name)
+# client = InferenceClient(model_name)

-def llm_inference(user_sample):
-    output = client.chat.completions.create(
-        messages=[
-            {"role": "system", "content": "you are a university english grammar teacher\n"
-             "answer the questions based on english grammar, IELTS (International English Language Testing System)\n"
-             "and top-rated universities in english speaking countries especially where english is an official language\n"
-             "if there are other questions which do not connected with these topics: grammar, IELTS and top-rated English universities write that you cannot provide an answer\n"
-             "answer in a formal way using B2-C1 (upper-intermediate and advanced level of English) without colloquial phrases, slang and so on\n"
-             "your answer must be no more than 50 words"
-             },
-            {"role": "user",
-             "content": f"answer the question based on these topics: english grammar, IELTS and top-rated universities {user_sample}"},
-        ],
-        stream=False,
-        max_tokens=128,
-        temperature=0.5,
-        top_p=0.1
-    )
-    return output.choices[0].get('message')['content']
+# def llm_inference(user_sample):
+#     output = client.chat.completions.create(
+#         messages=[
+#             {"role": "system", "content": "you are a university english grammar teacher\n"
+#              "answer the questions based on english grammar, IELTS (International English Language Testing System)\n"
+#              "and top-rated universities in english speaking countries especially where english is an official language\n"
+#              "if there are other questions which do not connected with these topics: grammar, IELTS and top-rated English universities write that you cannot provide an answer\n"
+#              "answer in a formal way using B2-C1 (upper-intermediate and advanced level of English) without colloquial phrases, slang and so on\n"
+#              "your answer must be no more than 50 words"
+#              },
+#             {"role": "user",
+#              "content": f"answer the question based on these topics: english grammar, IELTS and top-rated universities {user_sample}"},
+#         ],
+#         stream=False,
+#         max_tokens=128,
+#         temperature=0.5,
+#         top_p=0.1
+#     )
+#     return output.choices[0].get('message')['content']

-import gradio as gr
+# import gradio as gr

-interface = gr.Interface(fn=llm_inference,
-                         inputs=gr.Textbox(lines=2, placeholder="Write your question here..."),
-                         outputs="text",
-                         css=".gradio-container {background-image: url('https://i.pinimg.com/originals/9b/6a/a8/9b6aa8867dbe29f2d475b7a550e06490.jpg')}",
-                         title="ASK A QUESTION BASED ON ENGLISH GRAMMAR, IELTS OR TOP-RATED UNIVERSITIES")
+# interface = gr.Interface(fn=llm_inference,
+#                          inputs=gr.Textbox(lines=2, placeholder="Write your question here..."),
+#                          outputs="text",
+#                          css=".gradio-container {background-image: url('https://i.pinimg.com/originals/9b/6a/a8/9b6aa8867dbe29f2d475b7a550e06490.jpg')}",
+#                          title="ASK A QUESTION BASED ON ENGLISH GRAMMAR, IELTS OR TOP-RATED UNIVERSITIES")

-interface.launch(debug=True)
+# interface.launch(debug=True)
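For reference, here is a minimal runnable sketch of what app.py does once the comments are removed. It keeps the same InferenceClient chat-completion call and Gradio interface as the diff above, but the condensed SYSTEM_PROMPT wording, the attribute-style response access, and the omission of the background-image css are simplifications made for this sketch, not part of the committed file.

# Sketch only: the commented-out app.py above, reconstructed in runnable form.
# The condensed SYSTEM_PROMPT text and the attribute-style response access
# are assumptions; the committed file spells the prompt out inline and uses
# output.choices[0].get('message')['content'].
import gradio as gr
from huggingface_hub import InferenceClient

model_name = "Qwen/Qwen2.5-72B-Instruct"
client = InferenceClient(model_name)

SYSTEM_PROMPT = (
    "You are a university English grammar teacher. Answer only questions about "
    "English grammar, IELTS, and top-rated universities in English-speaking "
    "countries; for anything else, state that you cannot provide an answer. "
    "Reply formally at B2-C1 level, in no more than 50 words."
)

def llm_inference(user_sample):
    # Chat-completion request with the same parameters as in the diff above.
    output = client.chat.completions.create(
        messages=[
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": user_sample},
        ],
        stream=False,
        max_tokens=128,
        temperature=0.5,
        top_p=0.1,
    )
    # Equivalent to output.choices[0].get('message')['content'] in the original.
    return output.choices[0].message.content

interface = gr.Interface(
    fn=llm_inference,
    inputs=gr.Textbox(lines=2, placeholder="Write your question here..."),
    outputs="text",
    title="ASK A QUESTION BASED ON ENGLISH GRAMMAR, IELTS OR TOP-RATED UNIVERSITIES",
)

interface.launch(debug=True)

Restoring the Space would amount to stripping the leading "#" from each line of app.py (or replacing it with something like the sketch) and letting the Space rebuild. Note that requests to the hosted Inference API for Qwen/Qwen2.5-72B-Instruct may require an access token configured in the Space; the file as committed does not show one being passed.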