from transformers import AutoTokenizer, AutoModelForCausalLM
import gradio as gr

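# Load Salesforce's CodeGen-350M "mono" checkpoint, a code-generation model fine-tuned on Python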
tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen-350M-mono")
model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen-350M-mono")

# example_text = "create a function that receives two arguments and returns their sum"
example_text = [
    "create a function to calculate n!",
    "create a function that receives two arguments and returns their sum",
]

def get_code(prompt):
    # Tokenize the prompt and generate a completion of at most 128 tokens
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    generated_ids = model.generate(input_ids, max_length=128)
    return tokenizer.decode(generated_ids[0], skip_special_tokens=True)

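# Gradio Blocks UI: a prompt textbox, an output textbox for the generated code, and a button that triggers generation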
demo = gr.Blocks()

with demo:
    gr.Markdown(
        "## This demo generates Python code up to 128 tokens long"
    )
    with gr.Row():
        inputs = gr.Textbox(label="Prompt for generating code", lines=5)
        outputs = gr.Textbox(label="Python Code", lines=10)
    b1 = gr.Button("Generate Code")
    gr.Examples(examples=example_text, inputs=inputs, outputs=outputs)

    b1.click(fn=get_code, inputs=inputs, outputs=outputs)
demo.launch()