import gradio as gr
from huggingface_hub import InferenceClient
from transformers import pipeline
# Hugging Face Inference API Client
client = InferenceClient("bigscience/bloom")
# Hugging Face Transformers Pipeline for Question Answering
qa_pipeline = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")
# Code Snippets
code_snippets = {
    "fibonacci": {
        "python": """
def fib(n):
    if n <= 0:
        return 0
    elif n == 1:
        return 1
    else:
        return fib(n-1) + fib(n-2)
""",
        "javascript": """
function fib(n) {
    if (n <= 0) return 0;
    if (n === 1) return 1;
    return fib(n - 1) + fib(n - 2);
}
"""
    }
}
# Chatbot Function
def chatbot(message, history):
    if "python" in message.lower() and "fibonacci" in message.lower():
        return "Here is the Fibonacci code in Python:", gr.Code(language="python", value=code_snippets["fibonacci"]["python"])
    elif "javascript" in message.lower() and "fibonacci" in message.lower():
        return "Here is the Fibonacci code in JavaScript:", gr.Code(language="javascript", value=code_snippets["fibonacci"]["javascript"])
    elif "huggingface" in message.lower():
        # Generate text using the Hugging Face Inference API
        prompt = "Write a short poem about cybersecurity."
        # text_generation takes max_new_tokens (not max_length) and returns the generated string directly
        response = client.text_generation(prompt, max_new_tokens=50)
        return f"Hugging Face Generated Text: {response}", None
elif "question" in message.lower(): | |
# Use the QA pipeline to answer a question | |
question = "What is the purpose of cybersecurity?" | |
context = "Cybersecurity involves protecting systems, networks, and programs from digital attacks." | |
result = qa_pipeline(question=question, context=context) | |
return f"Hugging Face QA Answer: {result['answer']}", None | |
else: | |
return "Please ask about Python/JavaScript code or Hugging Face functionalities.", None | |
# Gradio Interface
with gr.Blocks() as demo:
    code_output = gr.Code(render=False)
    with gr.Row():
        with gr.Column():
            gr.Markdown("<center><h1>Chat About Code or Hugging Face</h1></center>")
            gr.ChatInterface(
                chatbot,
                examples=[
                    "Python Fibonacci",
                    "JavaScript Fibonacci",
                    "HuggingFace: Generate text",
                    "Ask a question about cybersecurity",
                ],
                additional_outputs=[code_output],
                type="messages",
            )
        with gr.Column():
            gr.Markdown("<center><h1>Code or Model Output</h1></center>")
            code_output.render()
# Launch the Gradio App
demo.launch()
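
For a quick sanity check of the routing (a minimal sketch, assuming the definitions above are available in a Python session, e.g. before demo.launch() is reached or after importing the script), note that every branch of chatbot() returns a pair: the chat reply, plus the value that ChatInterface forwards to code_output through additional_outputs (None leaves that panel unchanged):

# Hypothetical direct call, outside the Gradio UI
reply, extra = chatbot("Python Fibonacci", history=[])
print(reply)  # "Here is the Fibonacci code in Python:"
print(extra)  # a gr.Code component carrying the Python snippet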