import gradio as gr
import os
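# Select the TensorFlow backend for KerasNLP; this must be set before importing keras.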
os.environ["KERAS_BACKEND"] = "tensorflow"
import keras
import keras_nlp
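# Custom CSS: faint full-screen background image behind a vertically centered Gradio app.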
css = """
html, body {
  margin: 0;
  padding: 0;
  height: 100%;
  overflow: hidden;
}
body::before {
  content: '';
  position: fixed;
  top: 0;
  left: 0;
  width: 100vw;
  height: 100vh;
  /* Note: this is an Unsplash photo page URL; a direct image file URL may be needed for the background to render. */
  background-image: url('https://unsplash.com/photos/a-bunch-of-pills-are-in-a-glass-container-mfjoslXbb-8');
  background-size: cover;
  background-repeat: no-repeat;
  opacity: 0.75; /* Faint background image */
  background-position: center;
  z-index: -1; /* Keep the background behind the text */
}
.gradio-container {
  display: flex;
  justify-content: center;
  align-items: center;
  height: 100vh; /* Ensure the content is vertically centered */
}
"""
gemma_lm = keras_nlp.models.CausalLM.from_preset("hf://EmmaGozie/gemma-medic-bot-2b-en")
def launch(user_input):
    # Format the question with the same instruction/response template used for fine-tuning.
    template = "Instruction:\n{instruction}\n\nResponse:\n{response}"
    prompt = template.format(
        instruction=user_input,
        response="",
    )
    out = gemma_lm.generate(prompt, max_length=256)
    # The model output echoes the prompt, so return only the text after the "Response:\n" marker.
    ind = out.index("Response") + len("Response") + 2
    return out[ind:]
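# Simple text-in / text-out Gradio interface with the custom CSS applied.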
iface = gr.Interface(
    launch,
    inputs="text",
    outputs="text",
    css=css,
    title="Hey, I am Gozie-medicbot 👋 I can answer health-related questions, including drug usage, dosage, diseases, treatments, and side effects. Try me :)",
    description="Gemma_2b_en is fine-tuned on a comprehensive medical Q&A dataset.",
)
iface.launch()