Update app.py
app.py CHANGED
@@ -1,6 +1,7 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 import pandas as pd
+import torch
 
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
@@ -30,6 +31,11 @@ def respond(
     for index, row in df.iterrows():
         messages.append({"role": "user", "content": row['user']})
         messages.append({"role": "assistant", "content": row['assistant']})
+
+    selected_dfs = torch.load('selected_dfs.sav', map_location=torch.device('cpu'))
+    for df in selected_dfs:
+        messages.append({"role": "user", "content": df['user']})
+        messages.append({"role": "assistant", "content": df['assistant']})
 
     messages.append({"role": "user", "content": message})
     print(messages)
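The added lines assume a selected_dfs.sav file is present in the Space repository and that it deserializes to an iterable of dicts with 'user' and 'assistant' keys. A minimal sketch of how such a file could be produced with torch.save is shown below; the example contents are illustrative assumptions, not taken from the commit.

import torch

# Hypothetical few-shot examples in the shape the updated respond() expects:
# each entry exposes 'user' and 'assistant' keys.
selected_dfs = [
    {"user": "What does this Space do?",
     "assistant": "It answers questions through a huggingface_hub InferenceClient."},
    {"user": "Where do the extra examples come from?",
     "assistant": "They are loaded from selected_dfs.sav at request time."},
]

# torch.save serializes the Python object with pickle; torch.load with
# map_location=torch.device('cpu') then restores it on CPU, as app.py does.
torch.save(selected_dfs, "selected_dfs.sav")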