import gradio as gr
import re
import requests
import json
import os
from screenshot import BG_COMP, BOX_COMP, GENERATION_VAR, PROMPT_VAR, main
from pathlib import Path

title = "BLOOM"
description = """Gradio Demo for BLOOM. To use it, simply add your text, or click one of the examples to load them.

Tips:
- Do NOT talk to BLOOM as an entity, it's not a chatbot but a webpage/blog/article completion model.
- For the best results: MIMIC a few sentences of a webpage similar to the content you want to generate.
  Start a paragraph as if YOU were writing a blog, webpage, math post, or coding article, and BLOOM will generate a coherent follow-up. Longer prompts usually give more interesting results.

Options:
- sampling: imaginative completions (may not be super accurate, e.g. math/history)
- greedy: accurate completions (may be more boring or have repetitions)
"""

# Text-generation backend and flagging setup
API_URL = "https://hfbloom.ngrok.io/generate"
HF_API_TOKEN = os.getenv("HF_API_TOKEN")

hf_writer = gr.HuggingFaceDatasetSaver(
    HF_API_TOKEN, "huggingface/bloom_internal_prompts", organization="huggingface"
)

# Each example is [prompt, tokens to generate, decoding strategy, raw-text flag, seed choice]
examples = [
    [
        'A "whatpu" is a small, furry animal native to Tanzania. An example of a sentence that uses the word whatpu is: We were traveling in Africa and we saw these very cute whatpus. To do a "farduddle" means to jump up and down really fast. An example of a sentence that uses the word farduddle is:',
        32, "Sample", False, "Sample 1",
    ],
    [
        'A poem about the beauty of science by Alfred Edgar Brittle\nTitle: The Magic Craft\nIn the old times',
        50, "Sample", False, "Sample 1",
    ],
    ['استخراج العدد العاملي في لغة بايثون:', 30, "Greedy", False, "Sample 1"],
    ["Pour déguster un ortolan, il faut tout d'abord", 32, "Sample", False, "Sample 1"],
    [
        'Traduce español de España a español de Argentina\nEl coche es rojo - el auto es rojo\nEl ordenador es nuevo - la computadora es nueva\nel boligrafo es negro -',
        16, "Sample", False, "Sample 1",
    ],
    [
        'Estos ejemplos quitan vocales de las palabras\nEjemplos:\nhola - hl\nmanzana - mnzn\npapas - pps\nalacran - lcrn\npapa -',
        16, "Sample", False, "Sample 1",
    ],
    ["Question: If I put cheese into the fridge, will it melt?\nAnswer:", 32, "Sample", False, "Sample 1"],
    ["Math exercise - answers:\n34+10=44\n54+20=", 16, "Greedy", False, "Sample 1"],
    [
        "Question: Where does the Greek Goddess Persephone spend half of the year when she is not with her mother?\nAnswer:",
        24, "Greedy", False, "Sample 1",
    ],
    [
        "spelling test answers.\nWhat are the letters in « language »?\nAnswer: l-a-n-g-u-a-g-e\nWhat are the letters in « Romanian »?\nAnswer:",
        24, "Greedy", False, "Sample 1",
    ],
]


def query(payload):
    """Send a generation request to the inference endpoint and return the decoded JSON."""
    print(payload)
    response = requests.request("POST", API_URL, json=payload)
    print(response)
    return json.loads(response.content.decode("utf-8"))


def inference(input_sentence, max_length, sample_or_greedy, raw_text=False, seed=42):
    # Build the generation parameters for the chosen decoding strategy.
    if sample_or_greedy == "Sample":
        parameters = {
            "max_new_tokens": max_length,
            "top_p": 0.9,
            "do_sample": True,
            "seed": seed,
            "early_stopping": False,
            "length_penalty": 0.0,
            "eos_token_id": None,
        }
    else:
        parameters = {
            "max_new_tokens": max_length,
            "do_sample": False,
            "seed": seed,
            "early_stopping": False,
            "length_penalty": 0.0,
            "eos_token_id": None,
        }

    payload = {"inputs": input_sentence, "parameters": parameters}
    data = query(payload)

    # Return only the raw completion when no image rendering is requested.
    if raw_text:
        return None, data[0]["generated_text"]

    # Render the prompt and completion as a shareable image.
    width, height = 3246, 3246
    assets_path = "assets"
    font_mapping = {
        "latin characters (faster)": "DejaVuSans.ttf",
        "complete alphabet (slower)": "GoNotoCurrent.ttf",
    }
    working_dir = Path(__file__).parent.resolve()
    font_path = str(working_dir / font_mapping["complete alphabet (slower)"])
    img_save_path = str(working_dir / "output.jpeg")
    colors = {
        BG_COMP: "#000000",
        PROMPT_VAR: "#FFFFFF",
        GENERATION_VAR: "#FF57A0",
        BOX_COMP: "#120F25",
    }

    # Keep only the newly generated text (everything after the prompt).
    new_string = data[0]["generated_text"].split(input_sentence, 1)[1]

    _, img = main(
        input_sentence,
        new_string,
        width,
        height,
        assets_path=assets_path,
        font_path=font_path,
        colors=colors,
        frame_to_box_margin=200,
        text_to_text_box_margin=50,
        init_font_size=150,
        right_align=False,
    )
    return img, data[0]["generated_text"]


gr.Interface(
    inference,
    [
        gr.inputs.Textbox(label="Input"),
        gr.inputs.Slider(1, 64, default=32, step=1, label="Tokens to generate"),
        gr.inputs.Radio(["Sample", "Greedy"], label="Sample or greedy", default="Sample"),
        gr.Checkbox(label="Just output raw text"),
        gr.inputs.Radio(
            ["Sample 1", "Sample 2", "Sample 3", "Sample 4", "Sample 5"],
            default="Sample 1",
            label="Sample other generations (only works in 'Sample' mode)",
            type="index",
        ),
    ],
    ["image", "text"],
    examples=examples,
    # article=article,
    title=title,
    description=description,
    flagging_callback=hf_writer,
    allow_flagging=True,
).launch()