Update app.py
app.py
CHANGED
@@ -111,11 +111,14 @@ def transcribe(stream, new_chunk):
     input_features = ASR_processor(
         array, sampling_rate=16000, return_tensors="pt"
     ).input_features
+    print(input_features)
     input_features = input_features.to("cpu", dtype=getattr(torch, "float16"))
     pred_ids = ASR_model.generate(input_features, max_new_tokens=128, min_new_tokens=0, num_beams=1, return_timestamps=False,task="transcribe",language="en")
+    print(pred_ids)
     prompt = ASR_processor.batch_decode(
         pred_ids, skip_special_tokens=True, decode_with_timestamps=False
     )[0]
+    print(prompt)
     # prompt=ASR_model.transcribe(array)["text"].strip()
     chat.append({"role": user_role, "content": prompt})
     chat_messages = chat.to_list()
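For context, here is a minimal, hypothetical sketch of the transcription step this hunk touches, assuming the Space loads a multilingual Whisper checkpoint through transformers. The names ASR_processor and ASR_model mirror the diff; the checkpoint id, the transcribe_array wrapper, and the float32 CPU dtype (the Space itself casts the features to float16) are assumptions, not the Space's actual app.py.

import numpy as np
from transformers import WhisperForConditionalGeneration, WhisperProcessor

# Hypothetical checkpoint; the Space's actual model id is not shown in the diff.
ASR_processor = WhisperProcessor.from_pretrained("openai/whisper-tiny")
ASR_model = WhisperForConditionalGeneration.from_pretrained("openai/whisper-tiny")

def transcribe_array(array: np.ndarray) -> str:
    # Turn the 16 kHz mono waveform into log-mel input features.
    input_features = ASR_processor(
        array, sampling_rate=16000, return_tensors="pt"
    ).input_features
    # The Space casts the features to float16 on CPU; float32 is kept here for portability.
    pred_ids = ASR_model.generate(
        input_features,
        max_new_tokens=128,
        num_beams=1,
        task="transcribe",
        language="en",
    )
    # Decode the generated token ids back into plain text.
    return ASR_processor.batch_decode(pred_ids, skip_special_tokens=True)[0]

# Example call with one second of silence at 16 kHz.
print(transcribe_array(np.zeros(16000, dtype=np.float32)))

The three added print calls in the diff simply dump the intermediate tensors and decoded text to the Space's logs at each stage of this pipeline.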