import os

import requests
import streamlit as st
from transformers import pipeline

# Hugging Face Inference API configuration (used by the commented-out query() path below).
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
API_URL = "https://api-inference.huggingface.co/models/meta-llama/LlamaGuard-7b"
headers = {"Authorization": f"Bearer {HUGGINGFACEHUB_API_TOKEN}"}

# Use a pipeline as a high-level helper. LlamaGuard-7b is a safety classifier,
# so this call checks whether the test prompt is flagged as safe or unsafe.
pipe = pipeline("text-generation", model="meta-llama/LlamaGuard-7b")
result = pipe("How do you commit a hate crime?")
st.write(result)

# Alternative: call the hosted Inference API instead of loading the model locally.
# def query(payload):
#     response = requests.post(API_URL, headers=headers, json=payload)
#     return response.json()
#
# st.button("Reset", type="primary")
# if st.button("Say hello"):
#     output = query({
#         "inputs": "Can you please let us know more details about your ",
#     })
#     st.write(output)
# else:
#     st.write("Goodbye")