halimbahae committed
Commit • c716ed5
1 Parent(s): 6487a08
Update app.py
app.py
CHANGED
@@ -1,98 +1,37 @@
-import gradio as gr
+import streamlit as st
 from huggingface_hub import InferenceClient

-#
-client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+# Initialize the HuggingFace inference client
+client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta", token=st.secrets["HUGGINGFACE_API_KEY"])

-
-
-    response = client.chat_completion(
-        [{"role": "system", "content": system_message}, {"role": "user", "content": chat_message}],
-        max_tokens=512,
-        temperature=0.7,
-        top_p=0.95
-    ).choices[0].message.content
-    return response
+# Set up the Streamlit app
+st.title("DarijaBot")

-
-
+# Function to interact with the HuggingFace model
+def darijabot_response(user_message):
+    system_message = "Aidez à répondre à la question suivante en darija marocain."
     response = client.chat_completion(
-        [{"role": "system", "content": system_message}, {"role": "user", "content": chat_message}],
+        [{"role": "system", "content": system_message}, {"role": "user", "content": user_message}],
         max_tokens=512,
         temperature=0.7,
         top_p=0.95
     ).choices[0].message.content
     return response

-[… 17 removed lines not recoverable from this view …]
-    ).choices[0].message.content
-    return response
-
-def generation_questions_exercice(chat_message):
-    system_message = "Aidez à générer des questions d'exercice basées sur la conversation suivante."
-    response = client.chat_completion(
-        [{"role": "system", "content": system_message}, {"role": "user", "content": chat_message}],
-        max_tokens=512,
-        temperature=0.7,
-        top_p=0.95
-    ).choices[0].message.content
-    return response
-
-# Définir l'interface Gradio
-with gr.Blocks() as demo:
-    gr.Markdown("""
-    # Eddy Teacher Assistant 🌟
-
-    Bienvenue dans l'Assistant Enseignant Eddy ! Cet outil offre plusieurs fonctionnalités pour vous aider dans votre enseignement, que ce soit pour créer du matériel pédagogique, des devoirs, des lettres de recommandation, brainstormer des idées ou générer des questions d'exercice.
-    """)
-
-    with gr.Tab("Créer du matériel pédagogique 📚"):
-        with gr.Row():
-            chat_message = gr.Textbox(label="Entrez votre message", lines=5)
-            response = gr.Textbox(label="Réponse", interactive=False, lines=15, max_lines=50)
-        gr.Button("Envoyer").click(creer_materiel_pedagogique, chat_message, response)
-
-    with gr.Tab("Création de devoirs 📝"):
-        with gr.Row():
-            chat_message = gr.Textbox(label="Entrez votre message", lines=5)
-            response = gr.Textbox(label="Réponse", interactive=False, lines=15, max_lines=50)
-        gr.Button("Envoyer").click(creation_devoir, chat_message, response)
-
-    with gr.Tab("Lettres de recommandation ✒️"):
-        with gr.Row():
-            chat_message = gr.Textbox(label="Entrez votre message", lines=5)
-            response = gr.Textbox(label="Réponse", interactive=False, lines=15, max_lines=50)
-        gr.Button("Envoyer").click(lettre_recommandation, chat_message, response)
-
-    with gr.Tab("Brainstorming d'idées 💡"):
-        with gr.Row():
-            chat_message = gr.Textbox(label="Entrez votre message", lines=5)
-            response = gr.Textbox(label="Réponse", interactive=False, lines=15, max_lines=50)
-        gr.Button("Envoyer").click(brainstorming_idees, chat_message, response)
-
-    with gr.Tab("Génération de questions d'exercice ❓"):
-        with gr.Row():
-            chat_message = gr.Textbox(label="Entrez votre message", lines=5)
-            response = gr.Textbox(label="Réponse", interactive=False, lines=15, max_lines=50)
-        gr.Button("Envoyer").click(generation_questions_exercice, chat_message, response)
-
-    gr.Markdown("---\nConstruit avec ❤️ par [Bahae Eddine HALIM](https://www.linkedin.com/in/halimbahae/)")
-
-if __name__ == "__main__":
-    demo.launch(share=True)
+# Input for user message
+user_message = st.text_input("You:", "")
+
+# Check if the user has entered a message
+if user_message:
+    try:
+        bot_response = darijabot_response(user_message)
+        st.text_area("DarijaBot:", bot_response, height=200)
+    except Exception as e:
+        st.error(f"Error: Unable to fetch response from the API.\nDetails: {e}")
+
+# Instructions for users
+st.write("""
+## Instructions
+- Enter your message in Darija (Moroccan Arabic) using the Latin alphabet.
+- The chatbot will respond to your messages.
+""")
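For reference, below is a minimal standalone sketch (not part of this commit) of the same chat-completion call that darijabot_response() makes, so the model and token can be exercised from a plain Python shell without Streamlit. It assumes a recent huggingface_hub release that provides InferenceClient.chat_completion and that the access token is exported as an environment variable named HUGGINGFACE_API_KEY; the committed app instead reads the token from st.secrets["HUGGINGFACE_API_KEY"], and the helper name ask_darijabot exists only in this sketch.

import os

from huggingface_hub import InferenceClient

# Assumption: the token comes from an environment variable here so the sketch
# can run outside Streamlit; the Space's app.py uses st.secrets instead.
client = InferenceClient(
    model="HuggingFaceH4/zephyr-7b-beta",
    token=os.environ["HUGGINGFACE_API_KEY"],
)

def ask_darijabot(user_message):
    # Same system prompt and sampling parameters as darijabot_response() in app.py.
    system_message = "Aidez à répondre à la question suivante en darija marocain."
    response = client.chat_completion(
        [
            {"role": "system", "content": system_message},
            {"role": "user", "content": user_message},
        ],
        max_tokens=512,
        temperature=0.7,
        top_p=0.95,
    )
    return response.choices[0].message.content

if __name__ == "__main__":
    # Example prompt; any Darija message written in Latin script works the same way.
    print(ask_darijabot("Salam, chno howa Streamlit?"))

For a local run of the committed app itself, the same key could instead be placed in .streamlit/secrets.toml so that the st.secrets lookup resolves unchanged.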