phamngoctukts committed on
Commit
37a1b39
verified
1 Parent(s): 8fc786c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -15
app.py CHANGED
@@ -141,21 +141,16 @@ def response(state:AppState = AppState()):
141
  messages.append({"role": "user", "content": [{"type": "text", "text": txt}]})
142
  buffer = "Tôi không nghe rõ"
143
  try:
144
- texts = processor.apply_chat_template(messages, add_generation_prompt=True)
145
- if images == []:
146
- inputs = processor(text=texts, return_tensors="pt").to("cpu")
147
- else:
148
- inputs = processor(text=texts, images=images, return_tensors="pt").to("cpu")
149
- streamer = TextIteratorStreamer(processor, skip_special_tokens=True, skip_prompt=True)
150
- generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=max_new_tokens)
151
- thread = Thread(target=model.generate, kwargs=generation_kwargs)
152
- thread.start()
153
- buffer = ""
154
- for new_text in streamer:
155
- buffer += new_text
156
- time.sleep(0.01)
157
- state.textout=buffer
158
- textout=buffer
159
  except:
160
  print("Chưa lấy được thông tin dịch")
161
  if state.message["files"] != "":
 
141
  messages.append({"role": "user", "content": [{"type": "text", "text": txt}]})
142
  buffer = "Tôi không nghe rõ"
143
  try:
144
+ token = ""
145
+ for message in client.chat_completion(
146
+ messages,
147
+ max_tokens=max_new_tokens,
148
+ stream=True,
149
+ temperature=1.0,
150
+ top_p=0.9,
151
+ ):
152
+ token += message.choices[0].delta.content
153
+ textout=token
 
 
 
 
 
154
  except:
155
  print("Chưa lấy được thông tin dịch")
156
  if state.message["files"] != "":