Update app.py
app.py CHANGED

@@ -2,6 +2,7 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 import pandas as pd
 import torch
+import math
 
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
@@ -35,7 +36,7 @@ def respond(
     selected_dfs = torch.load('selected_dfs.sav', map_location=torch.device('cpu'))
     for df in selected_dfs:
         df = df.dropna()
-        df = df.sample(df.shape[0])
+        df = df.sample(math.floor(df.shape[0]/10))
         for index, row in df.iterrows():
             messages.append({"role": "user", "content": row['Column1.question']})
             messages.append({"role": "assistant", "content": row['Column1.answer']})
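The substantive change replaces a full shuffle, df.sample(df.shape[0]), with a roughly one-tenth subsample via math.floor(df.shape[0]/10), presumably to keep the few-shot message list short. Below is a minimal sketch of the effect on a toy DataFrame; the data, its size, and the print statement are illustrative assumptions, only the column names and the sampling line come from the diff.

    import math
    import pandas as pd

    # Toy stand-in for one of the loaded DataFrames (illustrative data only).
    df = pd.DataFrame({
        "Column1.question": [f"q{i}" for i in range(50)],
        "Column1.answer": [f"a{i}" for i in range(50)],
    })

    df = df.dropna()
    # Old behavior: df.sample(df.shape[0]) keeps all 50 rows, merely shuffled.
    # New behavior: keep roughly a tenth of the rows (5 here).
    df = df.sample(math.floor(df.shape[0] / 10))

    messages = []
    for index, row in df.iterrows():
        messages.append({"role": "user", "content": row["Column1.question"]})
        messages.append({"role": "assistant", "content": row["Column1.answer"]})

    print(len(messages))  # 10: one user/assistant pair per sampled row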