"""DarijaBot — a Streamlit chat app backed by the HuggingFace Inference API."""

import streamlit as st
from huggingface_hub import InferenceClient


@st.cache_resource
def _get_client():
    """Build the HuggingFace inference client exactly once.

    Streamlit re-runs this script top-to-bottom on every user interaction;
    ``st.cache_resource`` ensures the client (and the secrets lookup) is not
    re-created on each rerun.
    """
    return InferenceClient(
        model="HuggingFaceH4/zephyr-7b-beta",
        token=st.secrets["HUGGINGFACE_API_KEY"],
    )


# Kept as a module-level name for backward compatibility; the cached factory
# makes this effectively a singleton across reruns.
client = _get_client()

# Set up the Streamlit app
st.title("DarijaBot")


def darijabot_response(user_message):
    """Return the model's reply to *user_message*.

    Sends a French system prompt instructing the model to answer in Moroccan
    Darija, then extracts the first chat-completion choice's text.

    Raises whatever the HuggingFace client raises on API/network failure;
    the caller below surfaces that to the user.
    """
    system_message = "Aidez à répondre à la question suivante en darija marocain."
    response = client.chat_completion(
        [
            {"role": "system", "content": system_message},
            {"role": "user", "content": user_message},
        ],
        max_tokens=512,
        temperature=0.7,
        top_p=0.95,
    ).choices[0].message.content
    return response


# Input for user message
user_message = st.text_input("You:", "")

# Check if the user has entered a message
if user_message:
    try:
        bot_response = darijabot_response(user_message)
        st.text_area("DarijaBot:", bot_response, height=200)
    except Exception as e:
        # Top-level UI boundary: report API/network failures to the user
        # instead of crashing the Streamlit script.
        st.error(f"Error: Unable to fetch response from the API.\nDetails: {e}")

# Instructions for users
st.write("""
## Instructions
- Enter your message in Darija (Moroccan Arabic) using the Latin alphabet.
- The chatbot will respond to your messages.
""")