File size: 4,645 Bytes
b58a663
ac08f84
 
fd85460
b58a663
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f237cd8
b58a663
 
 
 
 
 
2f85679
c46ee0f
1bbaedf
b58a663
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1d34fd3
b58a663
 
 
 
 
8ee4722
b58a663
b6c3422
b58a663
 
 
9024fe6
b6c3422
b58a663
 
 
 
 
 
 
 
 
8ee4722
b58a663
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
import requests
import os 

# NOTE(review): the HF token is expected in the `hf_api_key` environment
# variable (read further below); this commented line was a placeholder for it.
#os.environ["hf_api_key"] = {hf_api_key}

from fastapi import FastAPI

# FastAPI application object; the /hf-inference/* endpoints below register on it.
app = FastAPI()

class HuggingFaceAPI:
    """Thin client for the Hugging Face Inference API.

    Provides language detection (papluca/xlm-roberta-base-language-detection)
    and text translation (Helsinki-NLP opus-mt-{src}-{tgt} models).
    """

    def __init__(self, token):
        # Bearer token for the HF Inference API (may be None -> unauthenticated).
        self.token = token

    def send_request(self, url, method, body):
        """Send an authenticated JSON request and return the decoded JSON.

        Parameters:
            url: full inference-API endpoint URL.
            method: "GET" or "POST" (anything else raises ValueError).
            body: JSON-serializable payload (used for POST only).

        Raises:
            ValueError: for an unsupported HTTP method.
            requests.HTTPError: via raise_for_status() on non-2xx responses.
        """
        headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json"
        }

        if method == "GET":
            response = requests.get(url, headers=headers)
        elif method == "POST":
            response = requests.post(url, headers=headers, json=body)
        else:
            raise ValueError(f"Unsupported HTTP method: {method}")

        response.raise_for_status()
        return response.json()

    def text_translation(self, text, source_language, target_language=None):
        """Translate *text* with the matching Helsinki-NLP opus-mt model.

        BUG FIX: the original class defined ``text_translation`` twice, so the
        second definition silently shadowed the first (auto-detect) variant —
        which itself passed the raw detection JSON where a language code was
        required. The two are merged here, backward-compatibly:

        - text_translation(text, source, target): explicit source language.
        - text_translation(text, target): auto-detect the source language.
        """
        if target_language is None:
            # Two-argument call: the second positional argument is the target;
            # detect the source language from the text itself.
            target_language = source_language
            detection = self.language_detection(text)
            # Detection response shape: [[{"label": "de", "score": ...}, ...]];
            # take the top-ranked label as the source-language code.
            source_language = detection[0][0]["label"]
        url = (
            "https://api-inference.huggingface.co/models/Helsinki-NLP/opus-mt-"
            + source_language + "-" + target_language
        )
        method = "POST"
        body = {
            "inputs": text
        }
        return self.send_request(url, method, body)

    def language_detection(self, text):
        """Return the raw language-detection JSON for *text*."""
        url = "https://api-inference.huggingface.co/models/papluca/xlm-roberta-base-language-detection"
        method = "POST"
        body = {
            "inputs": text
        }
        return self.send_request(url, method, body)

# --- FastAPI endpoints ---

@app.post("/hf-inference/language_detection")
async def language_detection_api(text: str):
    """Detect the language of *text*; returns the raw HF detection JSON."""
    return api.language_detection(text)

@app.post("/hf-inference/text_translation")
async def text_translation_api(text: str, source_language: str, target_language: str):
    """Translate *text* from an explicitly given source language."""
    return api.text_translation(text, source_language, target_language)

# BUG FIX: the original registered "/hf-inference/text_translation" twice; the
# second route was unreachable (the router matches the first registration) and
# its handler both shadowed the first function's name and called the 3-argument
# translation method with only 2 arguments (TypeError). The auto-detect variant
# gets its own path and function name instead.
@app.post("/hf-inference/text_translation_autodetect")
async def text_translation_autodetect_api(text: str, target_language: str):
    """Translate *text* to *target_language*, detecting the source language first."""
    # Detection response shape: [[{"label": ..., "score": ...}, ...]].
    detected_language = api.language_detection(text)[0][0]["label"]
    return api.text_translation(text, detected_language, target_language)

### End of FastAPI endpoints

# Module-level API client; the token is read from the `hf_api_key` env variable.
api = HuggingFaceAPI( os.environ.get("hf_api_key") )

# Called by the Gradio interface when inputs are provided.
def hf_inference_translate(prompt="Wie kann ich Ihnen helfen?",  target_language="en"):
    """Translate *prompt* into *target_language*, auto-detecting the source.

    Returns a tuple of (translated_text, label_scores) where label_scores maps
    the top-3 detected-language labels to their confidence scores — the two
    Gradio outputs below.
    """
    print(prompt)
    # Detect the input language. Response shape: [[{"label", "score"}, ...]].
    # (BUG FIX: the original pre-initialized the result to "" and immediately
    # overwrote it; the dead assignment is removed.)
    detection = api.language_detection(prompt)
    top_label = detection[0][0]['label']
    print(top_label)
    # Translate from the detected language to the chosen target language.
    text_translation_response = api.text_translation(prompt, top_label, target_language) 
    print(text_translation_response) 
    # Top-3 language candidates as {label: score} for the Gradio Label output.
    label_scores = {entry['label']: entry['score'] for entry in detection[0][:3]}
    print(label_scores)
    # Return the translated text plus the detection scores.
    return  text_translation_response[0]['translation_text'],label_scores

# Import-time smoke test: translate a German sample sentence to English.
text = "Hallo, ich bin Christof. Wie geht es dir?"
#text = "Меня зовут Вольфганг и я живу в Берлине"
translation_response = hf_inference_translate(text, "en")
print(translation_response)



import gradio as gr
import requests  # NOTE(review): already imported at the top of the file; redundant but kept.
 

# Gradio UI wired to hf_inference_translate: one text input, one target-language
# dropdown; outputs the translation plus the top-3 detected languages.
# NOTE(review): gr.inputs / gr.outputs is the legacy (pre-3.x) component API —
# confirm the pinned Gradio version still provides it.
iface = gr.Interface(
    fn=hf_inference_translate,
    inputs=[
        gr.inputs.Textbox(label="Input", lines=5, placeholder="Enter text to translate"),
        gr.inputs.Dropdown(["en", "fr", "de", "es", "ru"], default="de", label="Select target language")
    ],
    outputs=[
        gr.outputs.Textbox(label="Translated text"),
        gr.outputs.Label(label="Detected languages", num_top_classes=3)
    ],
    title="🧐 Translation Interface",
    description="Type something in any language below and then click Run to see the output in the chosen target language.",
    examples=[["Wie geht es Dir?", "fr"], ["Do you need help?", "de"], ["J'ai besoin d'aide ?", "en"]],
    # BUG FIX: `article_description` is not a gr.Interface parameter (ignored
    # with a warning at best, TypeError at worst); its text is folded into the
    # `article` markdown instead.
    article="## Text Examples\nUse examples",
    #live=True,
    debug=True,
    cache_examples=True
)

 

# Create a Gradio interface
#queue
iface.queue(concurrency_count=3)
# Run the Gradio interface
#iface.launch(share=True)
iface.launch(debug=True)