Hjgugugjhuhjggg committed on
Commit
64cb25e
1 Parent(s): 8e4fcb7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +135 -96
app.py CHANGED
@@ -1,18 +1,17 @@
1
  import os
2
  import json
3
- import logging
4
  import uuid
 
5
  import threading
6
- import io
7
  from fastapi import FastAPI, HTTPException
8
  from pydantic import BaseModel
9
  from google.cloud import storage
10
- from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
11
- import uvicorn
12
- import torch
13
- import requests
14
- from safetensors import safe_open
15
  from dotenv import load_dotenv
 
 
16
 
17
  load_dotenv()
18
 
@@ -29,7 +28,7 @@ try:
29
  storage_client = storage.Client.from_service_account_info(credentials_info)
30
  bucket = storage_client.bucket(GCS_BUCKET_NAME)
31
  logger.info(f"Conexión con Google Cloud Storage exitosa. Bucket: {GCS_BUCKET_NAME}")
32
- except (json.JSONDecodeError, KeyError, ValueError) as e:
33
  logger.error(f"Error al cargar las credenciales o bucket: {e}")
34
  raise RuntimeError(f"Error al cargar las credenciales o bucket: {e}")
35
 
@@ -45,60 +44,39 @@ class GCSHandler:
45
  self.bucket = storage_client.bucket(bucket_name)
46
 
47
  def file_exists(self, blob_name):
48
- return self.bucket.blob(blob_name).exists()
 
 
 
 
 
 
 
 
 
 
 
49
 
50
  def download_file(self, blob_name):
51
  blob = self.bucket.blob(blob_name)
52
  if not blob.exists():
 
53
  raise HTTPException(status_code=404, detail=f"File '{blob_name}' not found.")
54
- return blob.download_as_bytes()
55
-
56
- def upload_file(self, blob_name, file_data):
57
- blob = self.bucket.blob(blob_name)
58
- blob.upload_from_file(file_data)
59
 
60
  def generate_signed_url(self, blob_name, expiration=3600):
61
  blob = self.bucket.blob(blob_name)
62
- return blob.generate_signed_url(expiration=expiration)
63
-
64
- def create_folder(self, folder_name):
65
- blob = self.bucket.blob(folder_name + "/")
66
- blob.upload_from_string("") # Create an empty "folder"
67
-
68
- def load_model_from_gcs(model_name: str, model_files: list):
69
- gcs_handler = GCSHandler(GCS_BUCKET_NAME)
70
- model_blobs = {file: gcs_handler.download_file(f"{model_name}/{file}") for file in model_files}
71
-
72
- model_stream = model_blobs.get("pytorch_model.bin") or model_blobs.get("model.safetensors")
73
- config_stream = model_blobs.get("config.json")
74
- tokenizer_stream = model_blobs.get("tokenizer.json")
75
-
76
- if model_stream and model_stream.endswith(".safetensors"):
77
- model = load_safetensors_model(model_stream)
78
- else:
79
- model = AutoModelForCausalLM.from_pretrained(io.BytesIO(model_stream), config=config_stream)
80
-
81
- tokenizer = AutoTokenizer.from_pretrained(io.BytesIO(tokenizer_stream))
82
-
83
- return model, tokenizer
84
-
85
- def load_safetensors_model(model_stream):
86
- with safe_open(io.BytesIO(model_stream), framework="pt") as model_data:
87
- model = torch.load(model_data)
88
- return model
89
-
90
- def get_model_files_from_gcs(model_name: str):
91
- gcs_handler = GCSHandler(GCS_BUCKET_NAME)
92
- blob_list = list(gcs_handler.bucket.list_blobs(prefix=f"{model_name}/"))
93
- model_files = [blob.name for blob in blob_list if any(part in blob.name for part in ["pytorch_model", "model"]) and "index" not in blob.name]
94
- model_files = sorted(model_files)
95
- return model_files
96
 
97
  def download_model_from_huggingface(model_name):
98
  url = f"https://huggingface.co/{model_name}/tree/main"
99
  headers = {"Authorization": f"Bearer {HF_API_TOKEN}"}
100
 
101
  try:
 
102
  response = requests.get(url, headers=headers)
103
  if response.status_code == 200:
104
  model_files = [
@@ -107,90 +85,151 @@ def download_model_from_huggingface(model_name):
107
  "tokenizer.json",
108
  "model.safetensors",
109
  ]
110
- def download_file(file_name):
111
  file_url = f"https://huggingface.co/{model_name}/resolve/main/{file_name}"
112
  file_content = requests.get(file_url).content
113
  blob_name = f"{model_name}/{file_name}"
114
  blob = bucket.blob(blob_name)
115
  blob.upload_from_string(file_content)
116
-
117
- threads = [threading.Thread(target=download_file, args=(file_name,)) for file_name in model_files]
118
- for thread in threads:
119
- thread.start()
120
- for thread in threads:
121
- thread.join()
122
  else:
 
123
  raise HTTPException(status_code=404, detail="Error al acceder al árbol de archivos de Hugging Face.")
124
  except Exception as e:
 
125
  raise HTTPException(status_code=500, detail=f"Error descargando archivos de Hugging Face: {e}")
126
 
127
- def download_model_files(model_name: str):
128
- model_files = get_model_files_from_gcs(model_name)
129
- if not model_files:
130
- download_model_from_huggingface(model_name)
131
- model_files = get_model_files_from_gcs(model_name)
132
- return model_files
133
-
134
  @app.post("/predict/")
135
  async def predict(request: DownloadModelRequest):
 
136
  try:
137
  gcs_handler = GCSHandler(GCS_BUCKET_NAME)
138
  model_prefix = request.model_name
 
 
 
 
 
 
139
 
140
- model_files = download_model_files(model_prefix)
141
- model, tokenizer = load_model_from_gcs(model_prefix, model_files)
 
 
 
 
 
 
 
 
 
 
 
 
 
142
 
143
- pipe = pipeline(request.pipeline_task, model=model, tokenizer=tokenizer)
144
-
145
  if request.pipeline_task in ["text-generation", "translation", "summarization"]:
 
146
  result = pipe(request.input_text)
 
147
  return {"response": result[0]}
148
 
149
  elif request.pipeline_task == "image-generation":
150
- images = pipe(request.input_text)
151
- image = images[0]
152
- image_filename = f"{uuid.uuid4().hex}.png"
153
- image_path = f"images/{image_filename}"
154
- image.save(image_path)
155
- gcs_handler.upload_file(image_path, open(image_path, "rb"))
156
- image_url = gcs_handler.generate_signed_url(image_path)
157
- return {"response": {"image_url": image_url}}
 
 
 
 
 
 
 
158
 
159
  elif request.pipeline_task == "image-editing":
160
- edited_images = pipe(request.input_text)
161
- edited_image = edited_images[0]
162
- edited_image_filename = f"{uuid.uuid4().hex}_edited.png"
163
- edited_image.save(edited_image_filename)
164
- gcs_handler.upload_file(f"images/{edited_image_filename}", open(edited_image_filename, "rb"))
165
- edited_image_url = gcs_handler.generate_signed_url(f"images/{edited_image_filename}")
166
- return {"response": {"edited_image_url": edited_image_url}}
 
 
 
 
 
 
 
167
 
168
  elif request.pipeline_task == "image-to-image":
169
- transformed_images = pipe(request.input_text)
170
- transformed_image = transformed_images[0]
171
- transformed_image_filename = f"{uuid.uuid4().hex}_transformed.png"
172
- transformed_image.save(transformed_image_filename)
173
- gcs_handler.upload_file(f"images/{transformed_image_filename}", open(transformed_image_filename, "rb"))
174
- transformed_image_url = gcs_handler.generate_signed_url(f"images/{transformed_image_filename}")
175
- return {"response": {"transformed_image_url": transformed_image_url}}
 
 
 
 
 
 
 
176
 
177
  elif request.pipeline_task == "text-to-3d":
178
- model_3d_filename = f"{uuid.uuid4().hex}.obj"
179
- model_3d_path = f"3d-models/{model_3d_filename}"
180
- with open(model_3d_path, "w") as f:
181
- f.write("Simulated 3D model data")
182
- gcs_handler.upload_file(f"3d-models/{model_3d_filename}", open(model_3d_path, "rb"))
183
- model_3d_url = gcs_handler.generate_signed_url(f"3d-models/{model_3d_filename}")
184
- return {"response": {"model_3d_url": model_3d_url}}
 
 
 
 
 
 
185
 
186
  except HTTPException as e:
 
187
  raise e
188
  except Exception as e:
 
189
  raise HTTPException(status_code=500, detail=f"Error: {e}")
190
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
191
  @app.on_event("startup")
192
  async def startup_event():
193
- logger.info("Iniciando la API...")
194
 
195
  if __name__ == "__main__":
196
  uvicorn.run(app, host="0.0.0.0", port=7860)
 
1
  import os
2
  import json
 
3
  import uuid
4
+ import requests
5
  import threading
6
+ import logging
7
  from fastapi import FastAPI, HTTPException
8
  from pydantic import BaseModel
9
  from google.cloud import storage
10
+ from google.auth import exceptions
11
+ from transformers import pipeline
 
 
 
12
  from dotenv import load_dotenv
13
+ import uvicorn
14
+ import io
15
 
16
  load_dotenv()
17
 
 
28
  storage_client = storage.Client.from_service_account_info(credentials_info)
29
  bucket = storage_client.bucket(GCS_BUCKET_NAME)
30
  logger.info(f"Conexión con Google Cloud Storage exitosa. Bucket: {GCS_BUCKET_NAME}")
31
+ except (exceptions.DefaultCredentialsError, json.JSONDecodeError, KeyError, ValueError) as e:
32
  logger.error(f"Error al cargar las credenciales o bucket: {e}")
33
  raise RuntimeError(f"Error al cargar las credenciales o bucket: {e}")
34
 
 
44
  self.bucket = storage_client.bucket(bucket_name)
45
 
46
  def file_exists(self, blob_name):
47
+ exists = self.bucket.blob(blob_name).exists()
48
+ logger.debug(f"Comprobando existencia de archivo '{blob_name}': {exists}")
49
+ return exists
50
+
51
+ def upload_file(self, blob_name, file_stream):
52
+ blob = self.bucket.blob(blob_name)
53
+ try:
54
+ blob.upload_from_file(file_stream)
55
+ logger.info(f"Archivo '{blob_name}' subido exitosamente a GCS.")
56
+ except Exception as e:
57
+ logger.error(f"Error subiendo el archivo '{blob_name}' a GCS: {e}")
58
+ raise HTTPException(status_code=500, detail=f"Error subiendo archivo '{blob_name}' a GCS")
59
 
60
  def download_file(self, blob_name):
61
  blob = self.bucket.blob(blob_name)
62
  if not blob.exists():
63
+ logger.error(f"Archivo '{blob_name}' no encontrado en GCS.")
64
  raise HTTPException(status_code=404, detail=f"File '{blob_name}' not found.")
65
+ logger.debug(f"Descargando archivo '{blob_name}' de GCS.")
66
+ return blob.open("rb") # Abre el archivo en modo lectura de bytes
 
 
 
67
 
68
  def generate_signed_url(self, blob_name, expiration=3600):
69
  blob = self.bucket.blob(blob_name)
70
+ url = blob.generate_signed_url(expiration=expiration)
71
+ logger.debug(f"Generada URL firmada para '{blob_name}': {url}")
72
+ return url
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73
 
74
  def download_model_from_huggingface(model_name):
75
  url = f"https://huggingface.co/{model_name}/tree/main"
76
  headers = {"Authorization": f"Bearer {HF_API_TOKEN}"}
77
 
78
  try:
79
+ logger.info(f"Descargando el modelo '{model_name}' desde Hugging Face...")
80
  response = requests.get(url, headers=headers)
81
  if response.status_code == 200:
82
  model_files = [
 
85
  "tokenizer.json",
86
  "model.safetensors",
87
  ]
88
+ for file_name in model_files:
89
  file_url = f"https://huggingface.co/{model_name}/resolve/main/{file_name}"
90
  file_content = requests.get(file_url).content
91
  blob_name = f"{model_name}/{file_name}"
92
  blob = bucket.blob(blob_name)
93
  blob.upload_from_string(file_content)
94
+ logger.info(f"Archivo '{file_name}' subido exitosamente al bucket GCS.")
 
 
 
 
 
95
  else:
96
+ logger.error(f"Error al acceder al árbol de archivos de Hugging Face para '{model_name}'.")
97
  raise HTTPException(status_code=404, detail="Error al acceder al árbol de archivos de Hugging Face.")
98
  except Exception as e:
99
+ logger.error(f"Error descargando archivos de Hugging Face: {e}")
100
  raise HTTPException(status_code=500, detail=f"Error descargando archivos de Hugging Face: {e}")
101
 
 
 
 
 
 
 
 
102
  @app.post("/predict/")
103
  async def predict(request: DownloadModelRequest):
104
+ logger.info(f"Iniciando predicción para el modelo '{request.model_name}' con tarea '{request.pipeline_task}'...")
105
  try:
106
  gcs_handler = GCSHandler(GCS_BUCKET_NAME)
107
  model_prefix = request.model_name
108
+ model_files = [
109
+ "pytorch_model.bin",
110
+ "config.json",
111
+ "tokenizer.json",
112
+ "model.safetensors",
113
+ ]
114
 
115
+ model_files_exist = all(gcs_handler.file_exists(f"{model_prefix}/{file}") for file in model_files)
116
+
117
+ if not model_files_exist:
118
+ logger.info(f"Modelos no encontrados en GCS, descargando '{model_prefix}' desde Hugging Face...")
119
+ download_model_from_huggingface(model_prefix)
120
+
121
+ model_files_streams = {file: gcs_handler.download_file(f"{model_prefix}/{file}") for file in model_files if gcs_handler.file_exists(f"{model_prefix}/{file}")}
122
+
123
+ config_stream = model_files_streams.get("config.json")
124
+ tokenizer_stream = model_files_streams.get("tokenizer.json")
125
+ model_stream = model_files_streams.get("pytorch_model.bin")
126
+
127
+ if not config_stream or not tokenizer_stream or not model_stream:
128
+ logger.error(f"Faltan archivos necesarios para el modelo '{model_prefix}'.")
129
+ raise HTTPException(status_code=500, detail="Required model files missing.")
130
 
 
 
131
  if request.pipeline_task in ["text-generation", "translation", "summarization"]:
132
+ pipe = pipeline(request.pipeline_task, model=model_stream, tokenizer=tokenizer_stream)
133
  result = pipe(request.input_text)
134
+ logger.info(f"Resultado generado para la tarea '{request.pipeline_task}': {result[0]}")
135
  return {"response": result[0]}
136
 
137
  elif request.pipeline_task == "image-generation":
138
+ try:
139
+ pipe = pipeline("image-generation", model=model_stream)
140
+ images = pipe(request.input_text)
141
+ image = images[0]
142
+ image_filename = f"{uuid.uuid4().hex}.png"
143
+ image_path = f"images/{image_filename}"
144
+ image.save(image_path)
145
+
146
+ gcs_handler.upload_file(image_path, open(image_path, "rb"))
147
+ image_url = gcs_handler.generate_signed_url(image_path)
148
+ logger.info(f"Imagen generada y subida correctamente con URL: {image_url}")
149
+ return {"response": {"image_url": image_url}}
150
+ except Exception as e:
151
+ logger.error(f"Error generando la imagen: {e}")
152
+ raise HTTPException(status_code=400, detail="Error generando la imagen.")
153
 
154
  elif request.pipeline_task == "image-editing":
155
+ try:
156
+ pipe = pipeline("image-editing", model=model_stream)
157
+ edited_images = pipe(request.input_text)
158
+ edited_image = edited_images[0]
159
+ edited_image_filename = f"{uuid.uuid4().hex}_edited.png"
160
+ edited_image.save(edited_image_filename)
161
+
162
+ gcs_handler.upload_file(f"images/{edited_image_filename}", open(edited_image_filename, "rb"))
163
+ edited_image_url = gcs_handler.generate_signed_url(f"images/{edited_image_filename}")
164
+ logger.info(f"Imagen editada y subida correctamente con URL: {edited_image_url}")
165
+ return {"response": {"edited_image_url": edited_image_url}}
166
+ except Exception as e:
167
+ logger.error(f"Error editando la imagen: {e}")
168
+ raise HTTPException(status_code=400, detail="Error editando la imagen.")
169
 
170
  elif request.pipeline_task == "image-to-image":
171
+ try:
172
+ pipe = pipeline("image-to-image", model=model_stream)
173
+ transformed_images = pipe(request.input_text)
174
+ transformed_image = transformed_images[0]
175
+ transformed_image_filename = f"{uuid.uuid4().hex}_transformed.png"
176
+ transformed_image.save(transformed_image_filename)
177
+
178
+ gcs_handler.upload_file(f"images/{transformed_image_filename}", open(transformed_image_filename, "rb"))
179
+ transformed_image_url = gcs_handler.generate_signed_url(f"images/{transformed_image_filename}")
180
+ logger.info(f"Imagen transformada y subida correctamente con URL: {transformed_image_url}")
181
+ return {"response": {"transformed_image_url": transformed_image_url}}
182
+ except Exception as e:
183
+ logger.error(f"Error transformando la imagen: {e}")
184
+ raise HTTPException(status_code=400, detail="Error transformando la imagen.")
185
 
186
  elif request.pipeline_task == "text-to-3d":
187
+ try:
188
+ model_3d_filename = f"{uuid.uuid4().hex}.obj"
189
+ model_3d_path = f"3d-models/{model_3d_filename}"
190
+ with open(model_3d_path, "w") as f:
191
+ f.write("Simulated 3D model data")
192
+
193
+ gcs_handler.upload_file(f"3d-models/{model_3d_filename}", open(model_3d_path, "rb"))
194
+ model_3d_url = gcs_handler.generate_signed_url(f"3d-models/{model_3d_filename}")
195
+ logger.info(f"Modelo 3D generado y subido con URL: {model_3d_url}")
196
+ return {"response": {"model_3d_url": model_3d_url}}
197
+ except Exception as e:
198
+ logger.error(f"Error generando el modelo 3D: {e}")
199
+ raise HTTPException(status_code=400, detail="Error generando el modelo 3D.")
200
 
201
  except HTTPException as e:
202
+ logger.error(f"HTTPException: {e.detail}")
203
  raise e
204
  except Exception as e:
205
+ logger.error(f"Error inesperado: {e}")
206
  raise HTTPException(status_code=500, detail=f"Error: {e}")
207
 
208
def download_all_models_in_background():
    """Mirror Hugging Face models listed by the public API into GCS.

    Runs on a daemon thread (see run_in_background), so errors are logged
    and swallowed: the original raised HTTPException here, but in a
    background thread there is no request context to turn it into an HTTP
    response — it would only kill the worker with an unhandled exception.

    NOTE(review): the public /api/models listing is paginated and very
    large; mirroring everything it returns is almost certainly unintended
    — confirm the intended scope.
    """
    models_url = "https://huggingface.co/api/models"
    try:
        logger.info("Obteniendo lista de modelos desde Hugging Face...")
        response = requests.get(models_url)
        if response.status_code != 200:
            logger.error("Error al obtener la lista de modelos de Hugging Face.")
            return
        models = response.json()
        for model in models:
            model_name = model["id"]
            logger.info(f"Descargando el modelo '{model_name}' desde Hugging Face...")
            download_model_from_huggingface(model_name)
    except Exception as e:
        logger.error(f"Error al descargar modelos en segundo plano: {e}")
225
+
226
def run_in_background():
    """Start the model-mirroring job on a daemon thread."""
    logger.info("Iniciando la descarga de modelos en segundo plano...")
    worker = threading.Thread(target=download_all_models_in_background, daemon=True)
    worker.start()
229
+
230
@app.on_event("startup")
async def startup_event():
    """FastAPI startup hook: kick off background model mirroring."""
    # NOTE(review): @app.on_event is deprecated in recent FastAPI in
    # favour of lifespan handlers — consider migrating.
    run_in_background()
233
 
234
if __name__ == "__main__":
    # Run the API directly; port 7860 is the Hugging Face Spaces convention.
    uvicorn.run(app, host="0.0.0.0", port=7860)