Update app.py
app.py CHANGED
@@ -44,11 +44,12 @@ def transliterate_to_sinhala(text):
     return transliterate.process('Velthuis', 'Sinhala', text)
 
 # Placeholder for conversation model loading and pipeline setup
-pipe1 = pipeline("text-generation", model="microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
+# pipe1 = pipeline("text-generation", model="microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
 
-
-
-
+interface = gr.Interface.load("microsoft/Phi-3-mini-4k-instruct")
+
+def conversation_predict(text):
+    return interface([text])[0]
 
 def ai_predicted(user_input):
     if user_input.lower() == 'exit':
@@ -57,8 +58,8 @@ def ai_predicted(user_input):
     user_input = translate_Singlish_to_sinhala(user_input)
     user_input = transliterate_to_sinhala(user_input)
     user_input = translate_sinhala_to_english(user_input)
-    ai_response = pipe1([{"role": "user", "content": user_input}])
-
+    # ai_response = pipe1([{"role": "user", "content": user_input}])
+    ai_response = conversation_predict(user_input)
     ai_response_lines = ai_response.split("</s>")
 
     response = translate_english_to_sinhala(ai_response_lines[-1])
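The replacement path relies on calling a Gradio-loaded model like an ordinary Python function instead of a local transformers pipeline. Below is a minimal, hypothetical sketch of that helper (not part of the commit), assuming a current Gradio release where gr.Interface.load has been superseded by gr.load and Hub models are addressed with a "models/" prefix; the exact return shape depends on the model's endpoint, which is presumably why the commit indexes the result with interface([text])[0].

import gradio as gr

# Hypothetical stand-in for the commit's gr.Interface.load call; recent Gradio
# releases load Hub models with gr.load("models/<repo_id>").
interface = gr.load("models/microsoft/Phi-3-mini-4k-instruct")

def conversation_predict(text):
    # A loaded demo can be invoked like a regular function; the hosted
    # inference endpoint performs the actual text generation.
    return interface(text)

if __name__ == "__main__":
    print(conversation_predict("Hello! How are you?"))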