Update main.py
main.py CHANGED
@@ -38,11 +38,11 @@ def read_root(data: req):
     input_text = data.prompt
 
     # Tokenize the input text
-
+    inputs = tokenizer.encode_plus(history_string, input_text, return_tensors="pt")
 
     # Generate output using the model
-
-    generated_text = tokenizer.decode(
+    outputs = model.generate(**inputs)
+    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True).strip()
 
     answer_data = { "answer": generated_text }
     print("Answer:", generated_text)
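
For context, a minimal sketch of what the updated main.py might look like as a whole, assuming a FastAPI app serving a Hugging Face seq2seq chat model. The checkpoint name, the route decorator, and the module-level history_string are assumptions for illustration, not part of this commit.

from fastapi import FastAPI
from pydantic import BaseModel
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

app = FastAPI()

# Assumed checkpoint: the diff does not name the model; a BlenderBot-style
# seq2seq checkpoint is a common choice for this kind of chat Space.
MODEL_NAME = "facebook/blenderbot-400M-distill"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

history_string = ""  # assumed: conversation history kept as a plain string


class req(BaseModel):
    prompt: str


@app.post("/")  # assumed route; the diff only shows the function body
def read_root(data: req):
    input_text = data.prompt

    # Tokenize the input text together with the running history
    inputs = tokenizer.encode_plus(history_string, input_text, return_tensors="pt")

    # Generate output using the model
    outputs = model.generate(**inputs)
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True).strip()

    answer_data = {"answer": generated_text}
    print("Answer:", generated_text)
    return answer_data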