Update app.py
app.py
CHANGED
@@ -42,16 +42,16 @@ def generate(
     top_k: int = 50,
     repetition_penalty: float = 1.2,
 ) -> Iterator[str]:
+    conversation = []
     conversation = [json.loads(os.getenv("PROMPT"))]
-    #
-
-
-
-
-
-
-
-    )
+    # for user, assistant in chat_history:
+    #     conversation.extend(
+    #         [
+    #             json.loads(os.getenv("PROMPT")),
+    #             {"role": "user", "content": user},
+    #             {"role": "assistant", "content": assistant},
+    #         ]
+    #     )
     conversation.append({"role": "user", "content": message})
 
     input_ids = tokenizer.apply_chat_template(conversation, add_generation_prompt=True, return_tensors="pt")
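For context, the updated snippet seeds `conversation` with a system message parsed from the `PROMPT` environment variable, leaves the chat-history replay commented out, appends the current user turn, and tokenizes with `tokenizer.apply_chat_template`. Below is a minimal sketch of how that snippet typically sits inside a streaming `generate()` in a `transformers`-based Space; the model choice, the `TextIteratorStreamer`/`Thread` wiring, and the device handling are assumptions for illustration, not taken from this commit.

import json
import os
from threading import Thread
from typing import Iterator

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

# Assumed setup (not part of this commit): any chat model with a chat template works.
model_id = "HuggingFaceH4/zephyr-7b-beta"  # hypothetical choice for illustration
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto")


def generate(
    message: str,
    chat_history: list[tuple[str, str]],  # unused after this change; history replay is commented out
    max_new_tokens: int = 1024,
    temperature: float = 0.6,
    top_p: float = 0.9,
    top_k: int = 50,
    repetition_penalty: float = 1.2,
) -> Iterator[str]:
    # PROMPT holds a JSON-encoded system message,
    # e.g. PROMPT='{"role": "system", "content": "You are a helpful assistant."}'
    conversation = [json.loads(os.getenv("PROMPT"))]
    conversation.append({"role": "user", "content": message})

    input_ids = tokenizer.apply_chat_template(conversation, add_generation_prompt=True, return_tensors="pt")
    input_ids = input_ids.to(model.device)

    # Stream tokens back to the caller while generation runs in a background thread.
    streamer = TextIteratorStreamer(tokenizer, timeout=20.0, skip_prompt=True, skip_special_tokens=True)
    generate_kwargs = dict(
        input_ids=input_ids,
        streamer=streamer,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        top_p=top_p,
        top_k=top_k,
        temperature=temperature,
        repetition_penalty=repetition_penalty,
    )
    Thread(target=model.generate, kwargs=generate_kwargs).start()

    outputs = []
    for text in streamer:
        outputs.append(text)
        yield "".join(outputs)

With the history replay disabled, each call sends only the system prompt plus the latest user message, so the model sees no prior turns of the conversation.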