Xilixmeaty40 committed on
Commit 6671f1e · verified · 1 Parent(s): 3edcab0

Update app.py

Files changed (1)
  1. app.py +3 -20
app.py CHANGED
@@ -1,6 +1,6 @@
  import gradio as gr
  from gradio_client import Client
- from huggingface_hub import InferenceClient, HFRepository
+ from huggingface_hub import InferenceClient
 
  import random
 
@@ -21,23 +21,6 @@ for model in models:
      except Exception as e:
          print(f"Error loading model {model}: {str(e)}")
 
- # Load datasets from a text file
- def load_datasets_from_file(filename):
-     with open(filename, 'r') as file:
-         datasets = [line.strip() for line in file.readlines() if line.strip()]
-     return datasets
-
- # Load the dataset list from a text file (datasets.txt)
- datasets = load_datasets_from_file("datasets.txt")
-
- # Create a repository client for each dataset
- repository_clients = []
- for dataset in datasets:
-     try:
-         repository_clients.append(HFRepository(dataset))
-     except Exception as e:
-         print(f"Error loading dataset {dataset}: {str(e)}")
-
  VERBOSE = False
 
  def format_prompt(message, history, cust_p):
@@ -52,8 +35,8 @@ def format_prompt(message, history, cust_p):
      return prompt
 
  def chat_inf(system_prompt, prompt, history, memory, client_choice, seed, temp, tokens, top_p, rep_p, chat_mem, cust_p):
-     if not clients or not repository_clients:
-         yield [("Error", "No models or datasets available")], memory
+     if not clients:
+         yield [("Error", "No models available")], memory
      else:
          try:
              client = clients[int(client_choice) - 1]
 
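Aside from the commit itself: huggingface_hub does not expose an HFRepository class, which is presumably why the dataset-loading block and the unused import were dropped. If validating the entries in datasets.txt is still wanted, a minimal sketch along the following lines could use the library's existing HfApi.dataset_info call instead; the filename and the valid_datasets list are assumptions carried over from the deleted code, not part of this commit.

# Sketch only: check that each dataset repo id listed in datasets.txt exists
# on the Hub, replacing the removed HFRepository calls with HfApi.
from huggingface_hub import HfApi

api = HfApi()

def load_datasets_from_file(filename):
    # One dataset repo id per line; blank lines are ignored.
    with open(filename, "r") as file:
        return [line.strip() for line in file if line.strip()]

valid_datasets = []
for dataset in load_datasets_from_file("datasets.txt"):
    try:
        api.dataset_info(dataset)  # raises if the repo id is not found on the Hub
        valid_datasets.append(dataset)
    except Exception as e:
        print(f"Error loading dataset {dataset}: {str(e)}")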