# Gradio demo that translates text by prompting the BLOOM model through the
# Hugging Face hosted Inference API.
import gradio as gr
import requests
import os
import json

API_URL = "https://api-inference.huggingface.co/models/bigscience/bloom"
# Read the access token from the environment instead of hard-coding it
# (the variable name HF_API_TOKEN is a convention used here).
headers = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}

def translate(prompt_, from_lang, to_lang, input_prompt="translate this", seed=42):
    # from_lang is currently unused: the model is expected to infer the
    # source language from the text itself.
    # Fall back to the default prompt when the input box is empty.
    if len(prompt_.strip()) == 0:
        prompt_ = input_prompt

    prompt = f"To say \"{prompt_}\" in {to_lang}, you would say"
    print(prompt)

    # Generation parameters for the hosted inference API.
    json_ = {
        "inputs": prompt,
        "parameters": {
            "top_p": 0.9,
            "top_k": 100,
            "temperature": 1.1,
            "max_new_tokens": 250,
            "return_full_text": True,
            "do_sample": True,
            "num_beams": 3,
            "seed": seed,
            "repetition_penalty": 3.0,
        },
        "options": {
            "use_cache": True,
            "wait_for_model": True,
        },
    }
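    # The request below returns a JSON list; the shape is roughly
    # (illustrative only): [{"generated_text": "To say \"...\" in Hindi, you would say ..."}]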
    response = requests.post(API_URL, headers=headers, json=json_)
    print(f"Response is: {response}")
    output = json.loads(response.content.decode("utf-8"))
    print(f"output is: {output}")
    output_tmp = output[0]['generated_text']
    print(f"output_tmp is: {output_tmp}")

    # Keep only the text before the model starts a new "<language>:" line.
    solution = output_tmp.split(f"\n{to_lang}:")[0]

    # Trim everything after the first blank line, where the model tends to
    # drift onto unrelated continuations.
    if '\n\n' in solution:
        final_solution = solution.split("\n\n")[0]
    else:
        final_solution = solution

    return final_solution
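
# Example of calling translate() directly (hypothetical inputs; requires a
# valid HF_API_TOKEN in the environment):
#   translate("How are you?", "English", "Hindi")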

demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Bloom Translation</center></h1>")

    with gr.Row():
        from_lang = gr.Dropdown(['English', 'Spanish', 'Hindi', 'Bangla'],
                                value='English',
                                label='Select source language:')
    with gr.Row():
        to_lang = gr.Dropdown(['English', 'Spanish', 'Hindi'],
                              value='Hindi',
                              label='Select target language:')

    # Use a placeholder instead of a pre-filled value so the hint text is
    # not sent to the model if the user forgets to clear it.
    input_prompt = gr.Textbox(label="Enter the sentence:",
                              placeholder="Please write the text here",
                              lines=6)

    generated_txt = gr.Textbox(label="Translation", lines=3)

    b1 = gr.Button("Translate")
    b1.click(translate, inputs=[input_prompt, from_lang, to_lang], outputs=generated_txt)
    
demo.launch(enable_queue=True, debug=True)