"""Gradio demo that classifies prompts with the meta-llama/Prompt-Guard-86M classifier."""

import os
from functools import lru_cache

from dotenv import load_dotenv
import gradio as gr
from transformers import pipeline
from huggingface_hub import login


def setup_environment():
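    # Read HUGGING_FACE_TOKEN from a local .env file and log in to the Hugging Face Hub,
    # which is needed to download the license-gated Prompt-Guard-86M model.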
    load_dotenv()
    hf_token = os.getenv("HUGGING_FACE_TOKEN")
    if not hf_token:
        raise ValueError("HUGGING_FACE_TOKEN is not set in the environment variables")
    login(token=hf_token)


@lru_cache(maxsize=1)
def get_classifier():
    # Build the classification pipeline once and reuse it across requests;
    # reloading the model weights on every call would be needlessly slow.
    return pipeline("text-classification", model="meta-llama/Prompt-Guard-86M")


def classify_text(text):
    # top_k=None returns the score for every label, not just the best one.
    results = get_classifier()(text, top_k=None)

    formatted_results = {result["label"]: result["score"] for result in results}
    top_label = max(results, key=lambda x: x["score"])["label"]

    return formatted_results, top_label


def create_interface():
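    # Single free-text input; outputs show the per-label score distribution and the top label.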
    return gr.Interface(
        fn=classify_text,
        inputs="text",
        outputs=[
            gr.Label(label="Classification Results", num_top_classes=3),
            gr.Textbox(label="Top Classification"),
        ],
        title="Prompt-Guard-86M Text Classification",
    )


if __name__ == "__main__":
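    # Log in before the first request triggers the model download, then serve on 0.0.0.0:7860.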
    setup_environment()
    demo = create_interface()
    demo.launch(server_name="0.0.0.0", server_port=7860)