frncscp committed
Commit decbe2a
1 Parent(s): 2bc7e7e

Update pages/Entorno de Ejecución.py

Files changed (1):
  1. pages/Entorno de Ejecución.py  +27 -27
pages/Entorno de Ejecución.py CHANGED

@@ -177,45 +177,45 @@ with vit:
     elif uploaded_file is not None:
         with st.spinner('Cargando predicción...'):

-            y_gorritoo = query(uploaded_file.read(), model_dict[model_choice[0]])
-            st.write(y_gorritoo)
-            #classifiers = [pipeline("image-classification", model= model_dict[model_choice[i]]) for i in range(len(model_choice))]
+            #y_gorritoo = query(uploaded_file.read(), model_dict[model_choice[0]])
+            #st.write(y_gorritoo)
+            classifiers = [pipeline("image-classification", model= model_dict[model_choice[i]]) for i in range(len(model_choice))]

             #classifier = pipeline("image-classification", model= model_dict[model_choice[0]])
             img = preprocess(uploaded_file, module = 'pil')

             models = [model_dict[model] for model in model_choice]
             #st.write(models)
-            #def vit_ensemble(classifier_list, img):
-            #    y_gorrito = 0
-            #    for classifier in classifier_list:
-            #        classifier = classifier(img)
-            #        for clase in classifier:
-            #            if clase['label'] == 'Patacon-True':
-            #                y_gorrito += clase["score"]
-            #    return y_gorrito / len(classifier_list)
+            def vit_ensemble(classifier_list, img):
+                y_gorrito = 0
+                for classifier in classifier_list:
+                    classifier = classifier(img)
+                    for clase in classifier:
+                        if clase['label'] == 'Patacon-True':
+                            y_gorrito += clase["score"]
+                return y_gorrito / len(classifier_list)

             #models = [model_dict[i] for i in range(len(model_choice))]
             #st.write(type(models), models)

             #st.write(model_choice)

-            y_gorrito = 0
+            #y_gorrito = 0
             #y_gorrito = query(uploaded_file.read(), model_choice[0])[1]["score"]
-            i = -1
-            st.write("loop iniciado")
-            for model in models:
-                i+=1
-                st.write("y gorrito a cargar")
-                a = query(uploaded_file.read(), model)
-                if a == -1:
-                    st.write("Los servidores se encuentrar caídos, intente más tarde")
-                    break
-                st.write("query terminado")
-                y_gorrito += a
-                st.write("y gorrito cargado")
-            y_gorrito /= i
-            st.write("loop terminado")
+            #i = -1
+            #st.write("loop iniciado")
+            #for model in models:
+            #    i+=1
+            #    st.write("y gorrito a cargar")
+            #    a = query(uploaded_file.read(), model)
+            #    if a == -1:
+            #        st.write("Los servidores se encuentrar caídos, intente más tarde")
+            #        break
+            #    st.write("query terminado")
+            #    y_gorrito += a
+            #    st.write("y gorrito cargado")
+            #y_gorrito /= i
+            #st.write("loop terminado")

             #st.write("y gorrito calculado", len(model_choice))
             #classifier = classifier(img)
@@ -226,7 +226,7 @@ with vit:

             #y_gorrito = classifier[0]["score"]

-            #y_gorrito = vit_ensemble(classifiers, img)
+            y_gorrito = vit_ensemble(classifiers, img)
             #
             if round(float(y_gorrito * 100)) >= threshold:
                 st.success("¡Patacón Detectado!")
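
For context outside the diff: this commit switches the prediction path back from the remote query() loop to local transformers pipelines, averaging the 'Patacon-True' score across the selected checkpoints. The sketch below reproduces that ensemble step in isolation, assuming transformers and Pillow are installed; the model id, test image path, and threshold value are placeholders standing in for the app's model_dict, uploaded_file, and threshold, which are defined elsewhere in the file.

# Standalone sketch of the pipeline ensemble this commit restores.
# The checkpoint name, image path, and threshold are hypothetical placeholders.
from PIL import Image
from transformers import pipeline

def vit_ensemble(classifier_list, img):
    # Average the 'Patacon-True' score reported by each image-classification pipeline.
    y_gorrito = 0.0
    for classifier in classifier_list:
        for clase in classifier(img):
            if clase["label"] == "Patacon-True":
                y_gorrito += clase["score"]
    return y_gorrito / len(classifier_list)

if __name__ == "__main__":
    model_ids = ["frncscp/patacon-vit"]  # hypothetical checkpoint name
    classifiers = [pipeline("image-classification", model=m) for m in model_ids]
    img = Image.open("patacon.jpg").convert("RGB")  # hypothetical test image
    y_gorrito = vit_ensemble(classifiers, img)
    threshold = 75  # assumed percentage threshold
    if round(float(y_gorrito * 100)) >= threshold:
        print("¡Patacón Detectado!")
    else:
        print("No se detectó patacón.")

Unlike the removed query() loop, which called a remote inference endpoint per model and had to handle server downtime, this path loads each checkpoint locally, so a failure surfaces as an exception at pipeline construction rather than a -1 return value.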