Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Module setup for a NorT5 English<->Norwegian translation demo.
# The real model-loading path is currently commented out (the app serves a
# fake translation instead), but the imports are kept so the commented code
# can be re-enabled without changes.
import torch
from transformers import AutoTokenizer, TextIteratorStreamer
from modeling_nort5 import NorT5ForConditionalGeneration
from threading import Thread


# print(f"Starting to load the model to memory")

# tokenizer = AutoTokenizer.from_pretrained("nort5_en-no_base")
# cls_index = tokenizer.convert_tokens_to_ids("[CLS]")
# sep_index = tokenizer.convert_tokens_to_ids("[SEP]")
# user_index = tokenizer.convert_tokens_to_ids("[USER]")
# assistent_index = tokenizer.convert_tokens_to_ids("[ASSISTENT]")

# model = NorT5ForConditionalGeneration.from_pretrained("nort5_en-no_base", ignore_mismatched_sizes=True)

# Run on GPU when available, otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
print(f"SYSTEM: Running on {device}", flush=True)

# model = model.to(device)
# model.eval()

# print(f"Successfully loaded the model to the memory")


# System prompt (Norwegian): "You are NorT5, a language model made at the
# University of Oslo. You are a helpful and harmless assistant ..."
INITIAL_PROMPT = "Du er NorT5, en språkmodell laget ved Universitetet i Oslo. Du er en hjelpsom og ufarlig assistent som er glade for å hjelpe brukeren med enhver forespørsel."
# Decoding hyperparameters — presumably intended for model.generate() once the
# commented-out model loading above is re-enabled; unused while the model is off.
TEMPERATURE = 0.7
SAMPLE = True
BEAMS = 1
PENALTY = 1.2
TOP_K = 64
TOP_P = 0.95
def translate(source, source_language, target_language):
    """Produce a translation of *source* from *source_language* to *target_language*.

    Currently a stub: the model-loading code at the top of the file is
    disabled, so a fixed placeholder string is returned regardless of input.
    """
    placeholder = "This is a fake translation"
    return placeholder
38 |
+
|
39 |
+
import gradio as gr

# Gradio UI: a two-column translation page. The left column holds the source
# language dropdown, the source textbox and the submit button; the right column
# holds the target language dropdown and the (read-only) translation output.
with gr.Blocks(theme='sudeepshouche/minimalist') as demo:
    gr.Markdown("# Norwegian-English translation")
    # gr.HTML('<img src="https://huggingface.co/ltg/norbert3-base/resolve/main/norbert.png" width=6.75%>')
    # gr.Checkbox(label="I want to publish all my conversations", value=True)

    # chatbot = gr.Chatbot(value=[[None, "Hei, hva kan jeg gjøre for deg? 😊"]])

    with gr.Row():
        with gr.Column(scale=1):
            # FIX: the intended default language goes in `value=`; `label=` is
            # the caption shown above the field (it was previously set to the
            # default language name, leaving no default selection at all).
            source_language = gr.Dropdown(
                ["English", "Norwegian (Bokmål)", "Norwegian (Nynorsk)"],
                value="English", label="Source language"
            )
            source = gr.Textbox(
                label="Source text", placeholder="What do you want to translate?", show_label=True
            )  # .style(container=False)
            submit = gr.Button("Submit", variant="primary")  # .style(full_width=True)

        with gr.Column(scale=1):
            target_language = gr.Dropdown(
                ["English", "Norwegian (Bokmål)", "Norwegian (Nynorsk)"],
                value="Norwegian (Bokmål)", label="Target language"
            )
            target = gr.Textbox(
                label="Translation", show_label=True, interactive=False
            )


    def update_state_after_user():
        # Lock every input while a translation request is in flight.
        return {
            source: gr.update(interactive=False),
            submit: gr.update(interactive=False),
            source_language: gr.update(interactive=False),
            target_language: gr.update(interactive=False)
        }

    def update_state_after_return():
        # Re-enable every input once the translation has been produced.
        # FIX: the language dropdowns were previously left interactive=False,
        # so after the first translation the languages could never be changed.
        return {
            source: gr.update(interactive=True),
            submit: gr.update(interactive=True),
            source_language: gr.update(interactive=True),
            target_language: gr.update(interactive=True)
        }


    # Pressing Enter in the source textbox: lock inputs -> translate -> unlock.
    submit_event = source.submit(
        fn=update_state_after_user, inputs=None, outputs=[source, target, source_language, target_language], queue=False
    ).then(
        fn=translate, inputs=[source, source_language, target_language], outputs=[target], queue=True
    ).then(
        fn=update_state_after_return, inputs=None, outputs=[source, target, source_language, target_language], queue=False
    )

    # Clicking the submit button runs the exact same pipeline.
    submit_click_event = submit.click(
        fn=update_state_after_user, inputs=None, outputs=[source, target, source_language, target_language], queue=False
    ).then(
        fn=translate, inputs=[source, source_language, target_language], outputs=[target], queue=True
    ).then(
        fn=update_state_after_return, inputs=None, outputs=[source, target, source_language, target_language], queue=False
    )

demo.queue(max_size=32, concurrency_count=2)
demo.launch()