Spaces:
Running
on
Zero
Running
on
Zero
polinaeterna
committed on
Commit
·
fc6b70e
1
Parent(s):
9e7216d
fix
Browse files
app.py
CHANGED
@@ -10,7 +10,7 @@ import gradio as gr
|
|
10 |
import pandas as pd
|
11 |
import polars as pl
|
12 |
import matplotlib.pyplot as plt
|
13 |
-
|
14 |
from gradio_huggingfacehub_search import HuggingfaceHubSearch
|
15 |
from huggingface_hub import PyTorchModelHubMixin
|
16 |
import torch
|
@@ -50,7 +50,7 @@ model = QualityModel.from_pretrained("nvidia/quality-classifier-deberta").to(dev
|
|
50 |
model.eval()
|
51 |
|
52 |
|
53 |
-
|
54 |
def predict(texts: list[str]):
|
55 |
inputs = tokenizer(
|
56 |
texts, return_tensors="pt", padding="longest", truncation=True
|
@@ -81,11 +81,7 @@ def plot_and_df(texts, preds):
|
|
81 |
)
|
82 |
|
83 |
|
84 |
-
|
85 |
-
#
|
86 |
-
|
87 |
-
|
88 |
-
# @spaces.GPU
|
89 |
def run_quality_check(dataset, config, split, column, batch_size, num_examples):
|
90 |
logging.info(f"Fetching data for {dataset=} {config=} {split=} {column=}")
|
91 |
try:
|
|
|
10 |
import pandas as pd
|
11 |
import polars as pl
|
12 |
import matplotlib.pyplot as plt
|
13 |
+
import spaces
|
14 |
from gradio_huggingfacehub_search import HuggingfaceHubSearch
|
15 |
from huggingface_hub import PyTorchModelHubMixin
|
16 |
import torch
|
|
|
50 |
model.eval()
|
51 |
|
52 |
|
53 |
+
@spaces.GPU
|
54 |
def predict(texts: list[str]):
|
55 |
inputs = tokenizer(
|
56 |
texts, return_tensors="pt", padding="longest", truncation=True
|
|
|
81 |
)
|
82 |
|
83 |
|
84 |
+
@spaces.GPU
|
|
|
|
|
|
|
|
|
85 |
def run_quality_check(dataset, config, split, column, batch_size, num_examples):
|
86 |
logging.info(f"Fetching data for {dataset=} {config=} {split=} {column=}")
|
87 |
try:
|