import spaces  # Hugging Face Spaces SDK (needed when the Space runs on ZeroGPU hardware)
import gradio as gr
from transformers import pipeline
import os
import torch
title = """# 🙋🏻♂️Welcome to🌟Tonic's Nexus🐦⬛Raven"""

description = """You can build with this endpoint using Nexus Raven. The demo is still a work in progress, but we hope to add endpoints for commonly used functions such as intention mappers and audiobook processing.
You can also use Nexus🐦⬛Raven on your laptop by cloning this space. 🧬🔬🔍 Simply click here: <a style="display:inline-block" href="https://huggingface.co/spaces/Tonic1/NexusRaven2?duplicate=true"><img src="https://img.shields.io/badge/-Duplicate%20Space-blue?labelColor=white&style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAP5JREFUOE+lk7FqAkEURY+ltunEgFXS2sZGIbXfEPdLlnxJyDdYB62sbbUKpLbVNhyYFzbrrA74YJlh9r059973psed0cvUD4A+4HoCjsA85X0Dfn/RBLBgBDxnQPfAEJgBY+A9gALA4tcbamSzS4xq4FOQAJgCDwV2CPKV8tZAJcAjMMkUe1vX+U+SMhfAJEHasQIWmXNN3abzDwHUrgcRGmYcgKe0bxrblHEB4E/pndMazNpSZGcsZdBlYJcEL9Afo75molJyM2FxmPgmgPqlWNLGfwZGG6UiyEvLzHYDmoPkDDiNm9JR9uboiONcBXrpY1qmgs21x1QwyZcpvxt9NS09PlsPAAAAAElFTkSuQmCC&logoWidth=14" alt="Duplicate Space"></a>
Join us: 🌟TeamTonic🌟 is always making cool demos! Join our active builders'🛠️ community 👻 [![Join us on Discord](https://img.shields.io/discord/1109943800132010065?label=Discord&logo=discord&style=flat-square)](https://discord.gg/GWpVpekp) On 🤗Hugging Face: [TeamTonic](https://huggingface.co/TeamTonic) & [MultiTransformer](https://huggingface.co/MultiTransformer) On 🌐GitHub: [Tonic-AI](https://github.com/tonic-ai) & contribute to 🌟 [DataTonic](https://github.com/Tonic-AI/DataTonic) 🤗 Big thanks to Yuvi Sharma and all the folks at Hugging Face for the community grant 🤗
"""
# Load NexusRaven-V2-13B once at startup; device_map="auto" places the weights on
# the available GPU(s) and torch_dtype="auto" uses the checkpoint's native dtype.
raven_pipeline = pipeline(
    "text-generation",
    model="Nexusflow/NexusRaven-V2-13B",
    torch_dtype="auto",
    device_map="auto",
)
def process_text(input_text: str) -> str:
    """Wrap the input in the NexusRaven prompt format and return the generated function call."""
    prompt = f"User Query: {input_text}<human_end>"
    result = raven_pipeline(
        prompt, temperature=0.001, max_new_tokens=300,
        return_full_text=False, do_sample=True,
    )[0]["generated_text"]  # optionally: .replace("Call:", "").strip()
    # torch.cuda.empty_cache()
    return result
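
# Illustrative only: NexusRaven-V2 is a function-calling model, so the text fed to
# process_text() is meant to contain the candidate function signature(s) followed by
# the task. The function below is made up for the example; see the NexusRaven-V2
# model card for the exact prompt scheme.
EXAMPLE_INPUT = '''Function:
def get_weather(city: str):
    """Return the current weather for the given city."""

What is the weather in Paris right now?'''
# print(process_text(EXAMPLE_INPUT))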
def create_interface():
    # Alternative two-column layout (not wired up; main() below builds its own UI).
    with gr.Blocks() as app:
        gr.Markdown(title)
        gr.Markdown(description)
        with gr.Row():
            input_text = gr.Textbox(label="Input Text")
            submit_button = gr.Button("Submit")
        output_text = gr.Textbox(label="Nexus🐦⬛Raven")
        submit_button.click(process_text, inputs=input_text, outputs=output_text)
    return app
def main():
    with gr.Blocks() as demo:
        gr.Markdown(title)
        gr.Markdown(description)
        input_text = gr.Code(language="python", label="Input your functions, then your task:")
        submit_button = gr.Button("Submit")
        output_text = gr.Code(language="python", label="Nexus🐦⬛Raven")
        submit_button.click(process_text, inputs=input_text, outputs=output_text)
    demo.launch()
if __name__ == "__main__":
    main()
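
# Sketch (assumption, not part of this app): once the Space is running, it can be
# called programmatically with gradio_client. "Tonic1/NexusRaven2" is the Space name
# from the duplicate link in the description, and "/process_text" is the API name
# Gradio derives from the handler function by default.
#
#   from gradio_client import Client
#   client = Client("Tonic1/NexusRaven2")
#   print(client.predict("...functions and task...", api_name="/process_text"))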