MariamHussam committed on
Commit
cb562be
1 Parent(s): b507545

Update app.py

Files changed (1)
  1. app.py +23 -7
app.py CHANGED
@@ -14,17 +14,33 @@ class validation(BaseModel):
 #Fast API
 app = FastAPI()
 
+def translate_zh_to_en(llm, text):
+    response = llm.create_chat_completion(
+        messages=[
+            {
+                "role": "user",
+                "content": f"Translate the following text from Chinese into English.\nChinese: {text}\nEnglish:"
+            }
+        ],
+        temperature=0.2,
+        max_tokens=2048
+    )
+
+    # Extract the generated translation from the chat completion response.
+    content = response['choices'][0]['message']['content']
+    return content
+
 # <|im_start|>user
 # Translate the following text from Portuguese into English.
 # Portuguese: Um grupo de investigadores lançou um novo modelo para tarefas relacionadas com tradução.
 # English:<|im_end|>
 # <|im_start|>assistant
-
 @app.post("/translate")
 async def stream(item: validation):
-    translation_prompt = 'Translate the following text from Chinese into English.'
-    S_INST = "<|im_start|>"
-    E_INST = "<|im_end|>"
-    user, assistant = "user", "assistant"
-    prompt = f"{translation_prompt}\nChinese:{item.prompt}\nEnglish:"
-    return llm(prompt)
+    llm = Llama.from_pretrained(
+        repo_id="TheBloke/TowerInstruct-7B-v0.1-GGUF",
+        filename="towerinstruct-7b-v0.1.Q5_K_M.gguf",
+        n_ctx=4096,
+    )
+
+    return translate_zh_to_en(llm, item.prompt)
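
One thing worth noting about the new handler is that Llama.from_pretrained is called inside stream, so the GGUF file is re-resolved from the Hugging Face cache and reloaded into memory on every request. The following is a minimal sketch of how app.py could load the model once at startup instead; it is not the committed code. The prompt field on the validation model is an assumption taken from the item.prompt usage in the previous version of the handler, and the rest mirrors the diff above.

    from fastapi import FastAPI
    from llama_cpp import Llama
    from pydantic import BaseModel


    class validation(BaseModel):
        # Assumed field name, based on `item.prompt` in the earlier handler.
        prompt: str


    app = FastAPI()

    # Load the GGUF model once at import time. Llama.from_pretrained fetches the
    # file from the Hugging Face Hub (cached after the first run) and loads it.
    llm = Llama.from_pretrained(
        repo_id="TheBloke/TowerInstruct-7B-v0.1-GGUF",
        filename="towerinstruct-7b-v0.1.Q5_K_M.gguf",
        n_ctx=4096,
    )


    def translate_zh_to_en(llm, text):
        # Same chat-completion call as in the commit above.
        response = llm.create_chat_completion(
            messages=[
                {
                    "role": "user",
                    "content": f"Translate the following text from Chinese into English.\nChinese: {text}\nEnglish:",
                }
            ],
            temperature=0.2,
            max_tokens=2048,
        )
        return response['choices'][0]['message']['content']


    @app.post("/translate")
    async def stream(item: validation):
        return translate_zh_to_en(llm, item.prompt)

Loading at import time makes the first startup slower but keeps each /translate request limited to inference only.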
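For completeness, a hypothetical client call against the endpoint might look like the sketch below. The server address and the example Chinese sentence are assumptions, and the "prompt" JSON key again follows from the validation model described above.

    import requests

    # Hypothetical address of the running FastAPI app (e.g. started with `uvicorn app:app`).
    url = "http://127.0.0.1:8000/translate"

    # The JSON key "prompt" is assumed from the `validation` Pydantic model.
    payload = {"prompt": "一组研究人员发布了一个用于翻译相关任务的新模型。"}

    resp = requests.post(url, json=payload)
    print(resp.json())  # English translation produced by the endpoint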