Spaces:
Paused
Paused
ranamhamoud
committed on
Commit
โข
260591e
1
Parent(s):
8a8cdf5
Update app.py
Browse files
app.py
CHANGED
@@ -40,7 +40,35 @@ if torch.cuda.is_available():
|
|
40 |
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
41 |
tokenizer.pad_token = tokenizer.eos_token
|
42 |
|
43 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
44 |
def make_prompt(entry):
    """Wrap *entry* in the Human/Assistant prompt template expected by the model."""
    instruction = "### Human: YOUR INSTRUCTION HERE,ONLY TELL A STORY,INCLUDE AT LEAST AN MCQ, FILL IN THE BLANK AND TRUE OR FALSE: "
    return f"{instruction}{entry} ### Assistant:"
|
46 |
|
@@ -87,6 +115,10 @@ def generate(
|
|
87 |
for text in streamer:
|
88 |
outputs.append(text)
|
89 |
yield "".join(outputs)
|
|
|
|
|
|
|
|
|
90 |
|
91 |
|
92 |
chat_interface = gr.ChatInterface(
|
|
|
40 |
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
41 |
tokenizer.pad_token = tokenizer.eos_token
|
42 |
|
43 |
+
def save_chat_history(chat_history):
    """Persist a chat transcript to ``data/chat_history.json``.

    Each call appends one entry of the form
    ``{"id": <uuid4 string>, "chat_history": chat_history}`` to the JSON
    list on disk, creating the directory and file on first use.

    Args:
        chat_history: the conversation to store; must be JSON-serializable
            (callers pass a list of (message, response) pairs).

    Returns:
        The generated conversation id (a UUID4 string).
    """
    # os.path.dirname('data/') just evaluates to 'data' — spell it directly.
    os.makedirs('data', exist_ok=True)
    file_path = 'data/chat_history.json'

    # Unique ID so individual conversations can be looked up later.
    conversation_id = str(uuid.uuid4())
    conversation_entry = {
        "id": conversation_id,
        "chat_history": chat_history,
    }

    # Load the existing log; recover from a missing, unreadable, or
    # corrupted file by starting a fresh list instead of crashing the app.
    data = []
    if os.path.exists(file_path):
        try:
            with open(file_path, 'r', encoding='utf-8') as file:
                loaded = json.load(file)
            if isinstance(loaded, list):
                data = loaded
        except (json.JSONDecodeError, OSError):
            pass  # best-effort logging: keep serving even if the log is damaged

    data.append(conversation_entry)

    # Rewrite the whole file with the new entry appended.
    with open(file_path, 'w', encoding='utf-8') as file:
        json.dump(data, file, indent=4)

    return conversation_id
|
72 |
def make_prompt(entry):
    """Build the story-telling Human/Assistant prompt around *entry*."""
    return (
        "### Human: YOUR INSTRUCTION HERE,ONLY TELL A STORY,INCLUDE AT LEAST"
        " AN MCQ, FILL IN THE BLANK AND TRUE OR FALSE: "
        f"{entry} ### Assistant:"
    )
|
74 |
|
|
|
115 |
for text in streamer:
|
116 |
outputs.append(text)
|
117 |
yield "".join(outputs)
|
118 |
+
final_story = "".join(outputs) # The complete story
|
119 |
+
conversation_id = save_chat_history(chat_history + [(message, final_story)])
|
120 |
+
|
121 |
+
yield f"Conversation ID: {conversation_id}"
|
122 |
|
123 |
|
124 |
chat_interface = gr.ChatInterface(
|