oceansweep committed
Commit 0a1a0fd · verified · 1 Parent(s): a4ca640

Update app.py

Files changed (1):
  1. app.py +1 -40
app.py CHANGED
@@ -2748,46 +2748,7 @@ def chunk_transcript(transcript: str, chunk_duration: int, words_per_second) ->
      return chunks
 
 
- def summarize_chunks(api_name: str, api_key: str, transcript: List[dict], chunk_duration: int,
-                      words_per_second: int) -> str:
-     if api_name not in summarizers:  # See 'summarizers' dict in the main script
-         return f"Unsupported API: {api_name}"
-
-     if not transcript:
-         logging.error("Empty or None transcript provided to summarize_chunks")
-         return "Error: Empty or None transcript provided"
-
-     text = extract_text_from_segments(transcript)
-     chunks = chunk_transcript(text, chunk_duration, words_per_second)
-
-     custom_prompt = args.custom_prompt
-
-     summaries = []
-     for chunk in chunks:
-         if api_name == 'openai':
-             # Ensure the correct model and prompt are passed
-             summaries.append(summarize_with_openai(api_key, chunk, custom_prompt))
-         elif api_name == 'anthropic':
-             summaries.append(summarize_with_cohere(api_key, chunk, anthropic_model, custom_prompt))
-         elif api_name == 'cohere':
-             summaries.append(summarize_with_claude(api_key, chunk, cohere_model, custom_prompt))
-         elif api_name == 'groq':
-             summaries.append(summarize_with_groq(api_key, chunk, groq_model, custom_prompt))
-         elif api_name == 'llama':
-             summaries.append(summarize_with_llama(llama_api_IP, chunk, api_key, custom_prompt))
-         elif api_name == 'kobold':
-             summaries.append(summarize_with_kobold(kobold_api_IP, chunk, api_key, custom_prompt))
-         elif api_name == 'ooba':
-             summaries.append(summarize_with_oobabooga(ooba_api_IP, chunk, api_key, custom_prompt))
-         elif api_name == 'tabbyapi':
-             summaries.append(summarize_with_vllm(api_key, tabby_api_IP, chunk, summarize.llm_model, custom_prompt))
-         elif api_name == 'local-llm':
-             summaries.append(summarize_with_local_llm(chunk, custom_prompt))
-         else:
-             return f"Unsupported API: {api_name}"
-
-     return "\n\n".join(summaries)
-
+ #
  # FIXME - WHole section needs to be re-written
  def summarize_with_detail_openai(text, detail, verbose=False):
      summary_with_detail_variable = rolling_summarize(text, detail=detail, verbose=True)
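
The removed summarize_chunks checked api_name against a summarizers registry and then dispatched each chunk through a long if/elif chain. Below is a minimal sketch of the same chunk-then-dispatch pattern expressed through the registry itself; the summarize_with_* helpers are the ones visible in the diff above, while build_summarizers and summarize_chunks_via_registry are hypothetical names introduced only for this illustration, not part of the commit.

# Sketch only: the summarize_with_* helpers are assumed to be defined elsewhere in app.py;
# build_summarizers and summarize_chunks_via_registry are illustrative names.
from typing import Callable, Dict, List

def build_summarizers(api_key: str, custom_prompt: str) -> Dict[str, Callable[[str], str]]:
    # Each entry maps an api_name to a callable that summarizes one text chunk.
    return {
        'openai': lambda chunk: summarize_with_openai(api_key, chunk, custom_prompt),
        'local-llm': lambda chunk: summarize_with_local_llm(chunk, custom_prompt),
        # ...remaining backends (anthropic, cohere, groq, llama, kobold, ooba, tabbyapi)
        # would be registered the same way...
    }

def summarize_chunks_via_registry(api_name: str, chunks: List[str],
                                  summarizers: Dict[str, Callable[[str], str]]) -> str:
    # Reject unknown backends up front, then summarize chunk by chunk.
    if api_name not in summarizers:
        return f"Unsupported API: {api_name}"
    return "\n\n".join(summarizers[api_name](chunk) for chunk in chunks)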