Spanicin committed on
Commit
358991e
·
verified ·
1 Parent(s): 116013b

Update app_parallel.py

Browse files
Files changed (1) hide show
  1. app_parallel.py +17 -19
app_parallel.py CHANGED
@@ -72,7 +72,9 @@ app = Flask(__name__)
72
 
73
  TEMP_DIR = None
74
  start_time = None
75
- future_to_chunk = {}
 
 
76
 
77
  app.config['temp_response'] = None
78
  app.config['generation_thread'] = None
@@ -350,29 +352,25 @@ def parallel_processing():
350
  print(f"Splitting the audio into {chunk_duration}-second chunks...")
351
  audio_chunks = split_audio(driven_audio_path, chunk_duration=chunk_duration)
352
  print(f"Audio has been split into {len(audio_chunks)} chunks: {audio_chunks}")
353
-
354
- # future_to_chunk = {executor.submit(process_chunk, chunk[1], preprocessed_data, args): chunk[0] for chunk in audio_chunks}
355
- # return jsonify({"status": "processing started"}), 200
 
 
 
 
 
 
 
 
 
 
 
356
  try:
357
  return stream_with_context(generate_chunks(audio_chunks, preprocessed_data, args))
358
- # base64_video, temp_file_path, duration = process_chunk(driven_audio_path, preprocessed_data, args)
359
  except Exception as e:
360
  return jsonify({'status': 'error', 'message': str(e)}), 500
361
 
362
- # @app.route("/stream", methods=["GET"])
363
- # def stream_results():
364
- # global future_to_chunk
365
- # def generate():
366
- # for future in as_completed(future_to_chunk):
367
- # idx = future_to_chunk[future]
368
- # try:
369
- # base64_video, temp_file_path = future.result()
370
- # yield json.dumps({'start_time': idx, 'path': temp_file_path}).encode('utf-8')
371
- # except Exception as e:
372
- # yield json.dumps({'start_time': idx, 'error': str(e)}).encode('utf-8')
373
-
374
- # return stream_with_context(generate())
375
-
376
  @app.route("/health", methods=["GET"])
377
  def health_status():
378
  response = {"online": "true"}
 
72
 
73
  TEMP_DIR = None
74
  start_time = None
75
+ audio_chunks = []
76
+ preprocessed_data = None
77
+ args = None
78
 
79
  app.config['temp_response'] = None
80
  app.config['generation_thread'] = None
 
352
  print(f"Splitting the audio into {chunk_duration}-second chunks...")
353
  audio_chunks = split_audio(driven_audio_path, chunk_duration=chunk_duration)
354
  print(f"Audio has been split into {len(audio_chunks)} chunks: {audio_chunks}")
355
+ return jsonify({"status": "processing started, use /stream to get video chunks."}), 200
356
+
357
+ # try:
358
+ # return stream_with_context(generate_chunks(audio_chunks, preprocessed_data, args))
359
+ # # base64_video, temp_file_path, duration = process_chunk(driven_audio_path, preprocessed_data, args)
360
+ # except Exception as e:
361
+ # return jsonify({'status': 'error', 'message': str(e)}), 500
362
+
363
@app.route("/stream", methods=["GET"])
def stream_results():
    """Stream generated video chunks for the most recent processing request.

    Reads the module-level state (``audio_chunks``, ``preprocessed_data``,
    ``args``) that the processing endpoint is expected to populate, then
    streams the output of ``generate_chunks()`` back to the client.

    Returns:
        A streamed response of video-chunk payloads on success, or a JSON
        error body with status 400 (nothing processed yet) / 500 (generation
        failure).
    """
    global audio_chunks, preprocessed_data, args
    # Guard: nothing to stream until the processing endpoint has populated
    # the shared state — return a clear 400 instead of a confusing 500 from
    # generate_chunks() receiving None/empty inputs.
    if not audio_chunks or preprocessed_data is None:
        return jsonify({
            'status': 'error',
            'message': 'No processed audio available; call the processing endpoint first.',
        }), 400
    try:
        return stream_with_context(generate_chunks(audio_chunks, preprocessed_data, args))
    except Exception as e:
        # Surface any generation failure to the client as a JSON 500.
        return jsonify({'status': 'error', 'message': str(e)}), 500
373
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
374
  @app.route("/health", methods=["GET"])
375
  def health_status():
376
  response = {"online": "true"}