mrfakename committed
Commit: bd023d0
Parent: cbf237e

Update app.py

Files changed (1)
  1. app.py +4 -3
app.py CHANGED
@@ -4,7 +4,7 @@ import torch
 import os
 
 pipe = pipeline('audio-classification', model='mrfakename/styletts2-detector', device='cuda' if torch.cuda.is_available() else 'cpu')
-pipe_turbo = pipeline('audio-classification', model='mrfakename/styletts2-detector-turbo', device='cuda' if torch.cuda.is_available() else 'cpu', token=os.getenv('HF_TOKEN'))
+#pipe_turbo = pipeline('audio-classification', model='mrfakename/styletts2-detector-turbo', device='cuda' if torch.cuda.is_available() else 'cpu', token=os.getenv('HF_TOKEN'))
 
 ABOUT = """
 # 🤔 Did StyleTTS 2 Generate It?
@@ -41,10 +41,11 @@ def classify(audio, model):
 with gr.Blocks() as demo:
     gr.Markdown(ABOUT)
     aud = gr.Audio(label="Upload audio...", interactive=True, type="filepath")
-    model = gr.Radio(["default", "turbo"], label="Model", info="Which model do you want to use? Default is lightweight and efficient, Turbo is more robust and powerful.", value="default", interactive=True)
+    #model = gr.Radio(["default", "turbo"], label="Model", info="Which model do you want to use? Default is lightweight and efficient, Turbo is more robust and powerful.", value="default", interactive=True)
     btn = gr.Button("Classify", variant="primary")
     res = gr.Label(label="Results...")
-    btn.click(classify, inputs=[aud, model], outputs=res)
+    #btn.click(classify, inputs=[aud, model], outputs=res)
+    btn.click(classify, inputs=[aud], outputs=res)
     gr.Markdown(DISCLAIMER)
 
 demo.queue(default_concurrency_limit=20, max_size=20, api_open=False).launch(show_api=False)
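
Note: the new click handler passes only [aud], while the hunk context still shows def classify(audio, model):. The body of classify is not part of this diff, so the following is only a minimal sketch, under the assumption that classify keeps a defaulted model parameter and returns a label-to-score dict for gr.Label (standard output of a transformers audio-classification pipeline).

# Hypothetical sketch, not the actual app.py: a classify() compatible with
# btn.click(classify, inputs=[aud], outputs=res) after this commit.
from transformers import pipeline
import torch

# Only the default detector remains active; the turbo pipeline is commented out in the commit.
pipe = pipeline(
    'audio-classification',
    model='mrfakename/styletts2-detector',
    device='cuda' if torch.cuda.is_available() else 'cpu',
)

def classify(audio, model="default"):
    # `model` keeps a default value so the single-input click call still works;
    # only the default pipeline is used here.
    results = pipe(audio)
    # gr.Label expects a dict mapping each label to its confidence score.
    return {r["label"]: r["score"] for r in results}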