Update app.py
app.py
CHANGED
@@ -18,7 +18,7 @@ print ("Inf.Client")
 client = InferenceClient("https://api-inference.huggingface.co/models/meta-llama/Llama-2-70b-chat-hf")
 
 # generate function
-def generate(text):
+def generate(text, history):
     payload = tokenizer.apply_chat_template([{"role":"user","content":text}],tokenize=False)
     res = client.text_generation(
         payload,
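
For context, the change adds a history parameter so generate has the (message, history) signature that Gradio's gr.ChatInterface expects from its callback. Below is a minimal sketch of how such a function is typically wired up, assuming the Space uses gr.ChatInterface and a Llama-2 chat tokenizer; the tokenizer setup, generation kwargs, and launch call are assumptions, since the diff is truncated and does not show them.

# Minimal sketch (assumptions noted in comments), not the Space's actual app.py.
import gradio as gr
from huggingface_hub import InferenceClient
from transformers import AutoTokenizer

client = InferenceClient("https://api-inference.huggingface.co/models/meta-llama/Llama-2-70b-chat-hf")
# Assumption: the tokenizer comes from the same Llama-2 chat model.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-70b-chat-hf")

def generate(text, history):
    # gr.ChatInterface always calls its fn as fn(message, history);
    # here only the latest message is formatted and history is accepted but unused.
    payload = tokenizer.apply_chat_template(
        [{"role": "user", "content": text}], tokenize=False
    )
    res = client.text_generation(
        payload,
        max_new_tokens=256,  # assumed value; the real kwargs are cut off in the diff
    )
    return res

gr.ChatInterface(generate).launch()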