Chris4K committed on
Commit
b58a663
·
1 Parent(s): 9b78b1e

initial commit yat - yet another translater

Files changed (1)
  1. app.py +131 -0
app.py ADDED
@@ -0,0 +1,131 @@
+ import requests
+ import os
+
+ from fastapi import FastAPI
+
+ app = FastAPI()
+
+ class HuggingFaceAPI:
+     def __init__(self, token):
+         self.token = token
+
+     def send_request(self, url, method, body):
+         headers = {
+             "Authorization": f"Bearer {self.token}",
+             "Content-Type": "application/json"
+         }
+
+         if method == "GET":
+             response = requests.get(url, headers=headers)
+         elif method == "POST":
+             response = requests.post(url, headers=headers, json=body)
+         else:
+             raise ValueError(f"Unsupported HTTP method: {method}")
+
+         response.raise_for_status()
+         return response.json()
+
+     def text_translation_auto(self, text, target_language):
+         # Detect the source language first, then translate.
+         source_language = self.language_detection(text)[0][0]['label']
+         url = "https://api-inference.huggingface.co/models/Helsinki-NLP/opus-mt-" + source_language + "-" + target_language
+         method = "POST"
+         body = {
+             "inputs": text
+         }
+         return self.send_request(url, method, body)
+
+     def text_translation(self, text, source_language, target_language):
+         url = "https://api-inference.huggingface.co/models/Helsinki-NLP/opus-mt-" + source_language + "-" + target_language
+         method = "POST"
+         body = {
+             "inputs": text
+         }
+         return self.send_request(url, method, body)
+
+     def language_detection(self, text):
+         url = "https://api-inference.huggingface.co/models/papluca/xlm-roberta-base-language-detection"
+         method = "POST"
+         body = {
+             "inputs": text
+         }
+         return self.send_request(url, method, body)
+
+ # FastAPI endpoints
+
+ @app.post("/hf-inference/language_detection")
+ async def language_detection_api(text: str):
+     language_detection_response = api.language_detection(text)
+     return language_detection_response
+
+ @app.post("/hf-inference/text_translation")
+ async def text_translation_api(text: str, source_language: str, target_language: str):
+     text_translation_response = api.text_translation(text, source_language, target_language)
+     return text_translation_response
+
+ @app.post("/hf-inference/text_translation_auto")
+ async def text_translation_auto_api(text: str, target_language: str):
+     text_translation_response = api.text_translation_auto(text, target_language)
+     return text_translation_response
+
+ ### End of FastAPI endpoints
+
+ # NOTE: the token source is an assumption; the original passed an undefined
+ # `api_hf_key`, so read it from an environment variable of your choosing.
+ api = HuggingFaceAPI(os.environ.get("HF_API_KEY"))
+
+ # Define the function to be called when inputs are provided
+ def hf_inference_translate(prompt="Wie kann ich Ihnen helfen?", target_language="en"):
+     print(prompt)
+     # Detect the language of the input prompt
+     chat_response_languagedetected = api.language_detection(prompt)
+     print(chat_response_languagedetected[0][0])
+     # Translate based on the input prompt, detected language and chosen target language
+     text_translation_response = api.text_translation(prompt, chat_response_languagedetected[0][0]['label'], target_language)
+     print(text_translation_response)
+     # Extract the top labels and scores from the detection result
+     label_scores = {entry['label']: entry['score'] for entry in chat_response_languagedetected[0][:3]}
+     print(label_scores)
+     # Return the translated text and the detected-language scores
+     return text_translation_response[0]['translation_text'], label_scores
+
+ text = "Hallo, ich bin Christof. Wie geht es dir?"
+ # text = "Меня зовут Вольфганг и я живу в Берлине"  # Russian sample input
+ translation_response = hf_inference_translate(text, "en")
+ print(translation_response)
+
+
+ import gradio as gr
+
+ iface = gr.Interface(
+     fn=hf_inference_translate,
+     inputs=[
+         gr.inputs.Textbox(label="Input", lines=5, placeholder="Enter text to translate"),
+         gr.inputs.Dropdown(["en", "fr", "de", "es", "zh", "ru"], label="Select target language")
+     ],
+     outputs=[
+         gr.outputs.Textbox(label="Translated text"),
+         gr.outputs.Label(label="Detected languages", num_top_classes=3)
+     ],
+     title="Translation Interface",
+     description="Type something in any language below and then click Run to see the output in the chosen target language.",
+     examples=[["Wie geht es Dir?", "fr"], ["Do you need help?", "de"]],
+     article="## Text Examples",
+     #live=True,
+ )
+
+ # Queue requests so several translations can run concurrently
+ iface.queue(concurrency_count=3)
+ # Run the Gradio interface
+ iface.launch(share=True, debug=True)
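A minimal sketch of exercising the FastAPI part of app.py once it is served. The uvicorn invocation, host/port, and the HF_API_KEY environment-variable name are assumptions for illustration; the endpoint path and parameter names come from the code above, and because the parameters are plain str arguments, FastAPI reads them from the query string.

    # Serve the API (assumed invocation, not part of app.py):
    #   HF_API_KEY=<your token> uvicorn app:app --host 0.0.0.0 --port 8000

    import requests

    # text, source_language and target_language are plain str parameters on the
    # endpoint, so they are passed as query parameters.
    response = requests.post(
        "http://localhost:8000/hf-inference/text_translation",
        params={
            "text": "Hallo, wie geht es dir?",
            "source_language": "de",
            "target_language": "en",
        },
    )
    print(response.json())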