DigitalOcean Spaces helpers:
from boto3 import session
import os

ACCESS_ID = os.getenv('ACCESS_ID', '')
SECRET_KEY = os.getenv('SECRET_KEY', '')
LOCAL_WEIGHTS_DIR = 'weights'
DO_SPACE = 'sing'
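# ACCESS_ID and SECRET_KEY are expected to hold a DigitalOcean Spaces access key
# pair supplied via the environment; if they are left empty, the client calls
# below will fail with authentication errors.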
def upload_to_do(file_path):
    """Upload a local file to the DO Space, keyed by its base filename."""
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)
    filename_only = os.path.basename(file_path)
    # Upload the file to the Space; upload_file returns None on success and raises on failure
    response = client.upload_file(file_path, DO_SPACE, filename_only)
    return response
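# Example usage (a sketch; 'weights/alice_model.pth' is a hypothetical path, not
# one created by this module):
#
#   upload_to_do('weights/alice_model.pth')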
def download_from_do(file_key):
    """Download a single object from the DO Space into the local 'downloads' directory."""
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)
    # Ensure the downloads directory exists
    downloads_dir = 'downloads'
    if not os.path.exists(downloads_dir):
        os.makedirs(downloads_dir)
    # Set the full local path for the download
    full_local_path = os.path.join(downloads_dir, file_key)
    # Download the file from your Space
    client.download_file(DO_SPACE, file_key, full_local_path)
    # Verify the download
    if os.path.exists(full_local_path):
        print(f"File downloaded successfully to {full_local_path}")
        return full_local_path
    else:
        print("Download failed.")
        return None
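# Example usage (a sketch; 'alice_model.pth' is a hypothetical object key in the Space):
#
#   local_path = download_from_do('alice_model.pth')
#   if local_path:
#       print('ready at', local_path)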
def get_local_models(prefix):
    """Get the list of .pth model files starting with prefix in the local weights directory."""
    if not os.path.isdir(LOCAL_WEIGHTS_DIR):
        return []
    models = [f for f in os.listdir(LOCAL_WEIGHTS_DIR) if f.startswith(prefix) and f.endswith('.pth')]
    return models
def get_do_models(client, prefix):
    """Get the list of .pth model files starting with prefix in the DO Space."""
    paginator = client.get_paginator('list_objects')
    page_iterator = paginator.paginate(Bucket=DO_SPACE, Prefix=prefix)
    models = []
    for page in page_iterator:
        # Pages with no matching keys have no 'Contents' entry
        models.extend([obj['Key'] for obj in page.get('Contents', []) if obj['Key'].endswith('.pth')])
    return models
def sync_missing_models(client, local_models, do_models):
    """Download model files that exist in the DO Space but are missing locally."""
    missing_models = set(do_models) - set(local_models)
    print('missing models:', missing_models)
    for model in missing_models:
        client.download_file(DO_SPACE, model, os.path.join(LOCAL_WEIGHTS_DIR, model))
        print(f"Downloaded {model} from DO space to local weights directory.")
def list_models(email_prefix):
    """Sync models for the given prefix from the DO Space and return the local list."""
    local_models = get_local_models(email_prefix)
    # Initialize DO S3 client
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)
    do_models = get_do_models(client, email_prefix)
    sync_missing_models(client, local_models, do_models)
    # Return the updated list of local models after syncing
    updated_local_models = get_local_models(email_prefix)
    print(updated_local_models)
    return updated_local_models
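# Example usage (a sketch; the email prefix is hypothetical -- model files are
# assumed to be named '<email>...<suffix>.pth' both in the Space and in the
# local weights directory):
#
#   models = list_models('alice@example.com')
#   print(models)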
def download_from_do_with_prefix(prefix):
    """Download every object in the DO Space whose key starts with prefix."""
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)
    # Ensure the downloads directory exists
    downloads_dir = 'downloads'
    if not os.path.exists(downloads_dir):
        os.makedirs(downloads_dir)
    # List objects in the Space with the specified prefix (a single page, up to 1000 keys)
    response = client.list_objects(Bucket=DO_SPACE, Prefix=prefix)
    print(response)
    downloaded_files = []
    if 'Contents' in response:
        for obj in response['Contents']:
            file_key = obj['Key']
            # Set the full local path for the download
            full_local_path = os.path.join(downloads_dir, os.path.basename(file_key))
            # Download the file from your Space
            client.download_file(DO_SPACE, file_key, full_local_path)
            # Verify the download and add to the list if successful
            if os.path.exists(full_local_path):
                print(f"File downloaded successfully to {full_local_path}")
                downloaded_files.append(full_local_path)
            else:
                print(f"Download failed for {file_key}.")
    else:
        print("No files found with the specified prefix.")
    return downloaded_files if downloaded_files else None
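# Example usage (a sketch; the prefix is hypothetical):
#
#   files = download_from_do_with_prefix('alice@example.com')
#   # files is a list of local paths, or None if nothing matched the prefix.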
def ensure_model_in_weights_dir(model_name):
    """Make sure model_name exists in the local weights directory, downloading it if needed."""
    weights_dir = 'weights'
    model_path = os.path.join(weights_dir, model_name)
    # Check if the model already exists
    if os.path.exists(model_path):
        print(f"Model {model_name} already exists in {weights_dir}.")
        return True
    # If the model does not exist, attempt to download it
    print(f"Model {model_name} not found in {weights_dir}. Attempting to download...")
    # Initialize a session using DigitalOcean Spaces
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)
    # Ensure the weights directory exists
    if not os.path.exists(weights_dir):
        os.makedirs(weights_dir)
    # Attempt to download the model file
    try:
        client.download_file(DO_SPACE, model_name, model_path)
        print(f"Model {model_name} downloaded successfully to {model_path}.")
        return True
    except Exception as e:
        print(f"Failed to download {model_name}: {e}")
        return False
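# Minimal smoke test, assuming the credentials are set in the environment and
# 'some_model.pth' is a hypothetical object that exists in the Space:
if __name__ == '__main__':
    if ensure_model_in_weights_dir('some_model.pth'):
        print('Model is available locally.')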