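# Gradio demo comparing a base English->Serbo-Croatian translation model
# (Helsinki-NLP/opus-mt-tc-base-en-sh) against three fine-tuned
# English->Serbian checkpoints (S, M, L) side by side.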
import os
import gradio as gr
from transformers import pipeline

print("done 1")

# Load models
try:
    base_model = pipeline("translation", model="Helsinki-NLP/opus-mt-tc-base-en-sh")
    print("Base model loaded successfully.")
except Exception as e:
    print(f"Error loading base model: {e}")

try:
    fine_tuned_model_1 = pipeline("translation", model="perkan/shortS-opus-mt-tc-base-en-sr")
    print("Fine-tuned model S loaded successfully.")
except Exception as e:
    print(f"Error loading fine-tuned model S: {e}")

try:
    fine_tuned_model_2 = pipeline("translation", model="perkan/shortM-opus-mt-tc-base-en-sr")
    print("Fine-tuned model M loaded successfully.")
except Exception as e:
    print(f"Error loading fine-tuned model M: {e}")

try:
    fine_tuned_model_3 = pipeline("translation", model="perkan/shortL-opus-mt-tc-base-en-sr")
    print("Fine-tuned model L loaded successfully.")
except Exception as e:
    print(f"Error loading fine-tuned model L: {e}")

# Define translation functions
def translate_base(text):
    if base_model is None:
        return "Base model is not available (it failed to load)."
    try:
        return base_model(text)[0]['translation_text']
    except Exception as e:
        return f"Error during translation: {e}"

def translate_fine_tuned(text, model):
    # Map the dropdown label to the corresponding fine-tuned pipeline.
    models = {
        'S model': fine_tuned_model_1,
        'M model': fine_tuned_model_2,
        'L model': fine_tuned_model_3,
    }
    if model not in models:
        return "Invalid model selected"
    if models[model] is None:
        return f"{model} is not available (it failed to load)."
    try:
        return models[model](text)[0]['translation_text']
    except Exception as e:
        return f"Error during translation: {e}"

def translate_text(text, model):
    base_translation = translate_base(text)
    fine_tuned_translation = translate_fine_tuned(text, model)
    return base_translation, fine_tuned_translation

# Create Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Translation Models\nTranslate text using base and fine-tuned models.")
    
    with gr.Row():
        text_input = gr.Textbox(placeholder="Enter text to translate", label="Input")
        model_select = gr.Dropdown(choices=["S model", "M model", "L model"], label="Select Fine-tuned Model")
        translate_btn = gr.Button("Translate")
    
    with gr.Row():
        base_output = gr.Textbox(label="Base Model Translation")
        fine_tuned_output = gr.Textbox(label="Fine-tuned Model Translation")
    
    translate_btn.click(translate_text, inputs=[text_input, model_select], outputs=[base_output, fine_tuned_output])

# Launch on the port given by GRADIO_SERVER_PORT if set, otherwise 7861.
port = int(os.getenv("GRADIO_SERVER_PORT", "7861"))
demo.launch(server_port=port)