Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -44,10 +44,11 @@ def transliterate_to_sinhala(text):
|
|
44 |
return transliterate.process('Velthuis', 'Sinhala', text)
|
45 |
|
46 |
# Placeholder for conversation model loading and pipeline setup
|
|
|
47 |
|
48 |
-
def conversation_predict(text):
|
49 |
-
|
50 |
-
|
51 |
|
52 |
def ai_predicted(user_input):
|
53 |
if user_input.lower() == 'exit':
|
@@ -56,8 +57,8 @@ def ai_predicted(user_input):
|
|
56 |
user_input = translate_Singlish_to_sinhala(user_input)
|
57 |
user_input = transliterate_to_sinhala(user_input)
|
58 |
user_input = translate_sinhala_to_english(user_input)
|
59 |
-
|
60 |
-
ai_response = conversation_predict(user_input)
|
61 |
ai_response_lines = ai_response.split("</s>")
|
62 |
|
63 |
response = translate_english_to_sinhala(ai_response_lines[-1])
|
@@ -83,6 +84,8 @@ def respond(
|
|
83 |
|
84 |
messages.append({"role": "user", "content": message})
|
85 |
|
|
|
|
|
86 |
response = ai_predicted(message)
|
87 |
|
88 |
yield response
|
|
|
44 |
return transliterate.process('Velthuis', 'Sinhala', text)
|
45 |
|
46 |
# Placeholder for conversation model loading and pipeline setup
|
47 |
+
pipe1 = pipeline("text-generation", model="microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
|
48 |
|
49 |
+
# def conversation_predict(text):
|
50 |
+
# interface = gr.Interface.load("microsoft/Phi-3-mini-4k-instruct")
|
51 |
+
# return interface([text])[0]
|
52 |
|
53 |
def ai_predicted(user_input):
|
54 |
if user_input.lower() == 'exit':
|
|
|
57 |
user_input = translate_Singlish_to_sinhala(user_input)
|
58 |
user_input = transliterate_to_sinhala(user_input)
|
59 |
user_input = translate_sinhala_to_english(user_input)
|
60 |
+
ai_response = pipe1([{"role": "user", "content": user_input}])
|
61 |
+
# ai_response = conversation_predict(user_input)
|
62 |
ai_response_lines = ai_response.split("</s>")
|
63 |
|
64 |
response = translate_english_to_sinhala(ai_response_lines[-1])
|
|
|
84 |
|
85 |
messages.append({"role": "user", "content": message})
|
86 |
|
87 |
+
|
88 |
+
|
89 |
response = ai_predicted(message)
|
90 |
|
91 |
yield response
|