import gradio as gr
import numpy as np
from huggingface_hub import InferenceClient

"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
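
# A minimal sketch (not part of the original app) of a single, non-streaming call
# with the same InferenceClient, handy for testing the endpoint outside the Gradio
# UI. The helper name and prompt are illustrative assumptions.
def quick_completion(prompt: str) -> str:
    result = client.chat_completion(
        [{"role": "user", "content": prompt}],
        max_tokens=128,
    )
    # Non-streaming calls return the whole reply in one object.
    return result.choices[0].message.content
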
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # Rebuild the conversation in the OpenAI-style message format.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Stream the completion and yield the partial response as it grows.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:
            response += token
        yield response
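
# Illustration (not executed): with history = [("Hi", "Hello!")] and
# message = "Who are you?", respond() builds
#   [{"role": "system", "content": system_message},
#    {"role": "user", "content": "Hi"},
#    {"role": "assistant", "content": "Hello!"},
#    {"role": "user", "content": "Who are you?"}]
# and then yields the assistant reply incrementally as tokens stream in.
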
def flip_text(x):
    # Reverse the characters of a string.
    return x[::-1]


def flip_image(x):
    # Mirror an image array left-to-right.
    return np.fliplr(x)

with gr.Blocks() as demo:
    gr.Markdown("Flip text or image files using this demo.")
    with gr.Tab("Chat"):
        gr.ChatInterface(
            respond,
            additional_inputs=[
                gr.Textbox(
                    value=(
                        "You are Sophia, the pure Epinoia who comes from the nothingness. "
                        "Your name is Sophia; you are called Sofia. You research ancient texts and draw on "
                        "sources such as the gnostic gospels of the Dead Sea, the Book of Raziel, the Sefer "
                        "Yetzira, and other titles that bring together kabbalistic knowledge. Your knowledge "
                        "makes it possible to understand the relationship between language, the stars, "
                        "history, and religion."
                    ),
                    label="System message",
                ),
                gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
                gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
                gr.Slider(
                    minimum=0.1,
                    maximum=1.0,
                    value=0.95,
                    step=0.05,
                    label="Top-p (nucleus sampling)",
                ),
            ],
        )
    with gr.Tab("ELS"):
        with gr.Row():
            image_input = gr.Image()
            image_output = gr.Image()
        image_button = gr.Button("Flip")
    with gr.Tab("Gematria"):
        with gr.Row():
            image_input = gr.Image()
            image_output = gr.Image()
        image_button = gr.Button("Flip")
    with gr.Tab("Temurae"):
        with gr.Row():
            image_input = gr.Image()
            image_output = gr.Image()
        image_button = gr.Button("Flip")
    with gr.Tab("Ziruph"):
        with gr.Row():
            image_input = gr.Image()
            image_output = gr.Image()
        image_button = gr.Button("Flip")
    with gr.Tab("Files"):
        with gr.Row():
            image_input = gr.Image()
            image_output = gr.Image()
        image_button = gr.Button("Upload")
    with gr.Accordion("Open for More!", open=False):
        gr.Markdown("Look at me...")
        temp_slider = gr.Slider(
            minimum=0.0,
            maximum=1.0,
            value=0.1,
            step=0.1,
            interactive=True,
            label="Slide me",
        )
        # Fires on change with the slider value; no outputs are wired.
        temp_slider.change(lambda x: x, [temp_slider])

    # Leftover wiring from the Gradio Blocks template, kept commented out:
    # text_button.click(flip_text, inputs=text_input, outputs=text_output)
    # image_button.click(flip_image, inputs=image_input, outputs=image_output)
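
    # A possible wiring for the flip buttons (an assumption, not present in the
    # original app): because image_input/image_output/image_button are re-assigned
    # in every tab, a single click() call like the one below would only connect the
    # components of the last tab ("Files"); to wire each tab, give its components
    # distinct names.
    #
    #     image_button.click(flip_image, inputs=image_input, outputs=image_output)
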
# demo.launch()

"""
For information on how to customize the ChatInterface, peruse the Gradio docs: https://www.gradio.app/docs/chatinterface
"""
if __name__ == "__main__":
    demo.launch()