# Hugging Face Space: Prompt-Guard-86M text-classification demo (Gradio app).
import os
from functools import lru_cache

from dotenv import load_dotenv
import gradio as gr
from huggingface_hub import login
from transformers import pipeline
def setup_environment():
    """Load variables from .env and authenticate with the Hugging Face Hub.

    Raises:
        ValueError: if HUGGING_FACE_TOKEN is absent or empty in the environment.
    """
    load_dotenv()
    token = os.getenv("HUGGING_FACE_TOKEN")
    # Guard clause: fail fast with a clear message before attempting login.
    if not token:
        raise ValueError("HUGGING_FACE_TOKEN is not set in the environment variables")
    login(token=token)
@lru_cache(maxsize=1)
def _get_classifier():
    """Build the Prompt-Guard text-classification pipeline exactly once.

    The original code constructed the pipeline inside classify_text, forcing a
    full model download/load on every request; caching makes repeat calls fast.
    """
    return pipeline("text-classification", model="meta-llama/Prompt-Guard-86M")


def classify_text(text):
    """Classify *text* with meta-llama/Prompt-Guard-86M.

    Args:
        text: the input string to classify.

    Returns:
        A tuple ``(scores, top_label)`` where ``scores`` maps each label to its
        score and ``top_label`` is the label with the highest score.
    """
    pipe = _get_classifier()
    # top_k=None returns scores for ALL labels, not just the best one.
    results = pipe(text, top_k=None)
    scores = {result["label"]: result["score"] for result in results}
    top_label = max(results, key=lambda result: result["score"])["label"]
    return scores, top_label
def create_interface():
    """Assemble and return the Gradio UI for the Prompt-Guard classifier."""
    # Two outputs: a ranked label breakdown and the single winning label.
    label_panel = gr.Label(label="Classification Results", num_top_classes=3)
    top_box = gr.Textbox(label="Top Classification")
    interface = gr.Interface(
        fn=classify_text,
        inputs="text",
        outputs=[label_panel, top_box],
        title="Prompt-Guard-86M Text Classification",
    )
    return interface
if __name__ == "__main__":
    # Authenticate with the Hub first, then serve the UI on all interfaces.
    setup_environment()
    app = create_interface()
    app.launch(server_name="0.0.0.0", server_port=7860)