Pendrokar committed
Commit: 462fbb0
1 Parent(s): dee6d0d

send user's token to Zero GPU Spaces

Files changed (1):
  1. app.py +7 -5
app.py CHANGED
@@ -956,7 +956,7 @@ def doresample(path_to_wav):
 # 2x speedup (hopefully) #
 ##########################
 
-def synthandreturn(text):
+def synthandreturn(text, request: gr.Request):
     text = text.strip()
     if len(text) > MAX_SAMPLE_TXT_LENGTH:
         raise gr.Error(f'You exceeded the limit of {MAX_SAMPLE_TXT_LENGTH} characters')
@@ -996,7 +996,9 @@ def synthandreturn(text):
     # pointless saving of text to DB
     # log_text(text)
     print("[debug] Using", mdl1, mdl2)
-    def predict_and_update_result(text, model, result_storage):
+    def predict_and_update_result(text, model, result_storage, request: gr.Request):
+
+        x_ip_token = request.headers['x-ip-token']
         # 3 attempts
         attempt_count = 0
         while attempt_count < 3:
@@ -1005,7 +1007,7 @@ def synthandreturn(text):
            if '/' in model:
                # Use public HF Space
                #if (model not in hf_clients):
-               hf_clients[model] = Client(model, hf_token=hf_token)
+               hf_clients[model] = Client(model, hf_token=hf_token, headers={"X-IP-Token": x_ip_token})
                mdl_space = hf_clients[model]
 
                # print(f"{model}: Fetching endpoints of HF Space")
@@ -1131,8 +1133,8 @@ def synthandreturn(text):
     if mdl2 in AVAILABLE_MODELS.keys(): mdl2k=AVAILABLE_MODELS[mdl2]
     results = {}
     print(f"Sending models {mdl1k} and {mdl2k} to API")
-    thread1 = threading.Thread(target=predict_and_update_result, args=(text, mdl1k, results))
-    thread2 = threading.Thread(target=predict_and_update_result, args=(text, mdl2k, results))
+    thread1 = threading.Thread(target=predict_and_update_result, args=(text, mdl1k, results, request))
+    thread2 = threading.Thread(target=predict_and_update_result, args=(text, mdl2k, results, request))
 
     thread1.start()
     thread2.start()
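
For context: ZeroGPU Spaces attach a per-visitor token to each incoming request in the x-ip-token header, and forwarding that header to the downstream Space lets the synthesis call count against the visitor's ZeroGPU quota rather than the calling Space's. Below is a minimal, self-contained sketch of the same pattern outside this repository; the target Space name and api_name are hypothetical placeholders, and the fallback when the header is absent is an assumption, not something this commit does.

import gradio as gr
from gradio_client import Client

def synth(text, request: gr.Request):
    # Gradio fills in `request` automatically when the handler declares a
    # gr.Request parameter. On ZeroGPU, each request carries the visitor's
    # token in the `x-ip-token` header.
    x_ip_token = request.headers.get("x-ip-token")

    # Forward the token so the downstream ZeroGPU Space bills the visitor's
    # quota. Space name and api_name below are hypothetical placeholders.
    client = Client(
        "some-user/some-zerogpu-tts-space",
        headers={"X-IP-Token": x_ip_token} if x_ip_token else None,
    )
    return client.predict(text, api_name="/predict")

with gr.Blocks() as demo:
    text_in = gr.Textbox(label="Text to synthesize")
    audio_out = gr.Audio(label="Result")
    text_in.submit(synth, text_in, audio_out)

demo.launch()

If the header is missing (for example, when running locally instead of on Spaces), the sketch simply creates an unauthenticated client, so the request falls back to whatever quota applies to the caller.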