import gradio as gr
import os

# Select the TensorFlow backend before Keras is imported.
os.environ["KERAS_BACKEND"] = "tensorflow"

import keras
import keras_nlp
css = """ | |
html, body { | |
margin: 0; | |
padding: 0; | |
height: 100%; | |
overflow: hidden; | |
} | |
body::before { | |
content: ''; | |
position: fixed; | |
top: 0; | |
left: 0; | |
width: 100vw; | |
height: 100vh; | |
background-image: url('https://stsci-opo.org/STScI-01J5E849R5W27ZZ2C3QAE9ET75.png'); | |
background-size: cover; | |
background-repeat: no-repeat; | |
background-position: center; | |
opacity: 0.75; /* Fainter background image */ | |
z-index: -1; /* Keep the background behind text */ | |
} | |
.gradio-container { | |
display: flex; | |
justify-content: center; | |
align-items: center; | |
height: 100vh; /* Ensure the content is vertically centered */ | |
} | |
/* Larger text for the output area */ | |
.output_class { | |
font-size: 1em; /* Adjust this to control the size of the output text */ | |
color: black; /* Ensure the text is readable */ | |
} | |
""" | |
# Load the fine-tuned Gemma model directly from the Hugging Face Hub.
gemma_lm = keras_nlp.models.CausalLM.from_preset("hf://sultan-hassan/CosmoGemma_2b_en")
def launch(question):
    # Wrap the question in the same instruction/response template used for fine-tuning.
    template = "Instruction:\n{instruction}\n\nResponse:\n{response}"
    prompt = template.format(
        instruction=question,
        response="",
    )
    out = gemma_lm.generate(prompt, max_length=256)
    # Return only the text that follows the "Response:" marker.
    marker = "Response:\n"
    return out[out.index(marker) + len(marker):]
iface = gr.Interface(
    launch,
    inputs="text",
    outputs="text",
    css=css,
    title="Hello, I am an expert in cosmology! How can I help you today? :)",
    description=(
        "Gemma_2b_en fine-tuned on ~3.5k QA pairs generated from Cosmology and "
        "Nongalactic Astrophysics articles (arXiv astro-ph.CO, 2018-2022) and "
        "tested on ~1k QA pairs generated from 2023 articles, scoring over 75% accuracy."
    ),
)

iface.launch()
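
# --- Optional: querying the deployed app programmatically ---
# A minimal sketch, assuming the Space is published under the (hypothetical)
# id "sultan-hassan/CosmoGemma" and that the gradio_client package is installed.
# Run it from a separate script, since iface.launch() above blocks this process.
#
# from gradio_client import Client
#
# client = Client("sultan-hassan/CosmoGemma")  # hypothetical Space id
# answer = client.predict("What is the Hubble tension?", api_name="/predict")
# print(answer)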