import gradio as gr
import os

# Select the Keras backend before importing keras / keras_nlp.
os.environ["KERAS_BACKEND"] = "tensorflow"

import keras
import keras_nlp

# Background image for the Gradio page (hosted by STScI).
css = ".gradio-container {background: url(https://stsci-opo.org/STScI-01J6D97YGSQACWK990TH56K6AF.png)}"

# Load the fine-tuned Gemma 2B model from the Hugging Face Hub preset.
gemma_lm = keras_nlp.models.CausalLM.from_preset("hf://sultan-hassan/CosmoGemma_2b_en")


def launch(question):
    # Build the instruction-style prompt the model was fine-tuned on,
    # leaving the response field empty for the model to complete.
    template = "Instruction:\n{instruction}\n\nResponse:\n{response}"
    prompt = template.format(
        instruction=question,
        response="",
    )
    out = gemma_lm.generate(prompt, max_length=256)
    # Return only the text after the "Response:" marker
    # (+2 skips the colon and the following newline).
    ind = out.index("Response") + len("Response") + 2
    return out[ind:]


iface = gr.Interface(
    launch,
    inputs="text",
    outputs="text",
    css=css,
    title="Hello, I am an expert in cosmology, try me!",
    description=(
        "Gemma_2b_en fine-tuned on ~3.5k QA pairs generated from Cosmology and "
        "Nongalactic Astrophysics articles (arXiv astro-ph.CO, 2018-2022) and "
        "tested on ~1k QA pairs generated from 2023 articles, scoring over 75% accuracy."
    ),
)
iface.launch()