# Camel-space / app.py
import gradio as gr
import torch
from transformers import pipeline

# Load the text-generation pipeline for the model, on GPU if one is available
client = pipeline(
    model="bragour/Camel-7b-chat",
    device=0 if torch.cuda.is_available() else -1,
)
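# A transformers text-generation pipeline returns a list of dicts such as
# [{"generated_text": "..."}]; note that by default the generated text also
# contains the prompt (pass return_full_text=False to get only the completion).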
def respond(
    message,
    max_tokens,
    temperature,
    top_p,
):
    # Generate the response with the local pipeline
    result = client(
        message,
        max_new_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        do_sample=True,  # sampling so that temperature and top_p take effect
    )
    response = result[0]["generated_text"]
    return response
# Define the Gradio interface, with controls for the sampling parameters
demo = gr.Interface(
    fn=respond,
    inputs=[
        gr.Textbox(label="Message"),
        gr.Slider(minimum=1, maximum=1024, value=256, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.05, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
    ],
    outputs="text",
)
if __name__ == "__main__":
    demo.launch()
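# Once the app is running, the same endpoint can be called programmatically
# with gradio_client. This is a minimal sketch: the Space id
# "bragour/Camel-space" is an assumption based on the repo name, and the
# argument order matches the inputs defined above (message, max new tokens,
# temperature, top-p); "/predict" is the default endpoint name for gr.Interface.
#
#   from gradio_client import Client
#
#   space = Client("bragour/Camel-space")  # hypothetical Space id
#   reply = space.predict(
#       "Hello, who are you?",  # message
#       256,                    # max new tokens
#       0.7,                    # temperature
#       0.95,                   # top-p
#       api_name="/predict",
#   )
#   print(reply)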