import gradio as gr
import re
import requests
import json
import os

title = "BLOOM"
description = "Gradio Demo for BLOOM. To use it, simply add your text, or click the example below to load it."

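# Remote text-generation endpoint (ngrok tunnel) and the Hugging Face token
# used by the flagging dataset saver below.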
API_URL = "https://hfbloom.ngrok.io/generate"
HF_API_TOKEN = os.getenv("HF_API_TOKEN")

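# Flagging callback: flagged prompts and outputs are appended to a Hugging Face dataset.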
hf_writer = gr.HuggingFaceDatasetSaver(HF_API_TOKEN, "huggingface/bloom_internal_prompts", organization="huggingface")


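# Few-shot prompt offered as a clickable example in the UI.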
examples = [
    ['A "whatpu" is a small, furry animal native to Tanzania. An example of a sentence that uses the word whatpu is: We were traveling in Africa and we saw these very cute whatpus. To do a "farduddle" means to jump up and down really fast. An example of a sentence that uses the word farduddle is:']
]

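# Helper (currently unused in this file): escapes Markdown/URL-special characters
# and wraps the result in <pre> tags.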
def safe_text(text):
    text = text.replace('%', '\\%25')
    text = text.replace('#', '\\%23')
    text = text.replace('+', '\\%2B')
    text = text.replace('*', '\\%2A')
    text = text.replace('&', '\\%26')
    text = re.sub(r"([$_*\[\]()~`>\#\+\-=|\.!{}])", r"\\\1", text)
    return f"<pre>{text}</pre>"


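# POST the payload to the inference endpoint and return the decoded JSON response.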
def query(payload):
    response = requests.request("POST", API_URL, json=payload)
    return json.loads(response.content.decode("utf-8"))
    
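# Build the generation request; a Top K of 0 is mapped to None (no top-k filtering),
# and the echoed prompt is stripped from the generated text before returning.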
def inference(input_sentence, max_length, temperature, greedy_decoding, top_k, top_p, seed=42):
    top_k = None if top_k == 0 else top_k
    payload = {"inputs": input_sentence,
               "parameters": {"max_new_tokens": max_length, "top_k": top_k, "top_p": top_p, "temperature": temperature,
                              "do_sample": not greedy_decoding, "seed": seed}}
    data = query(payload)
    return data[0]['generated_text'][len(input_sentence):]


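# The input components below are listed in the same order as inference()'s arguments:
# input text, tokens to generate, temperature, greedy flag, top-k, top-p.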
gr.Interface(
    inference,
    [
        gr.inputs.Textbox(label="Input"),
        gr.inputs.Slider(1, 64, default=8, label="Tokens to generate"),
        # Temperature slider (range/default assumed).
        gr.inputs.Slider(0.0, 1.0, default=0.7, step=0.05, label="Temperature"),
        gr.inputs.Checkbox(False, label="Greedy decoding"),
        gr.inputs.Slider(0, 64, default=0, label="Top K"),
        # top_p is a probability mass, so the slider is capped at 1.0.
        gr.inputs.Slider(0.0, 1.0, default=0.9, step=0.05, label="Top P"),
    ],
    gr.outputs.Textbox(label="Output"),
    examples=examples,
    # article=article,
    title=title,
    description=description,
    flagging_options=["save"],
    flagging_callback=hf_writer
).launch()