LLMServer/main/utils/helpers.py
import psutil
import torch
from pathlib import Path
from typing import Dict, Any


def get_system_info() -> Dict[str, Any]:
    """Get system resource information"""
    return {
        "cpu_percent": psutil.cpu_percent(),
        "memory_percent": psutil.virtual_memory().percent,
        "gpu_available": torch.cuda.is_available(),
        "gpu_memory_used": torch.cuda.memory_allocated() if torch.cuda.is_available() else 0,
        "gpu_memory_total": torch.cuda.get_device_properties(0).total_memory if torch.cuda.is_available() else 0,
    }


def calculate_optimal_batch_size(model_size: int, available_memory: int) -> int:
    """Calculate optimal batch size based on model size and available memory"""
    memory_per_sample = model_size * 1.5  # Rough estimate including overhead
    # Floor-divide and cast explicitly: multiplying by 1.5 makes this a float,
    # and the function is annotated to return an int.
    return max(1, int(available_memory // memory_per_sample))


def ensure_folder_structure(config: Dict) -> None:
    """Ensure all necessary folders exist"""
    folders = [
        Path(config["folders"]["models"]),
        Path(config["folders"]["cache"]),
        Path(config["folders"]["logs"]),
    ]
    for folder in folders:
        folder.mkdir(parents=True, exist_ok=True)


def format_memory_size(size_bytes: int) -> str:
    """Format memory size to human readable format"""
    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
        if size_bytes < 1024:
            return f"{size_bytes:.2f}{unit}"
        size_bytes /= 1024
    # Fall back for values of 1024 TB or more so every path returns a string.
    return f"{size_bytes:.2f}PB"
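

# A minimal usage sketch, not part of the original module: it assumes the same
# config layout that ensure_folder_structure expects ("folders" with "models",
# "cache", and "logs" keys) and that model_size / available_memory are measured
# in bytes. The 4 GiB / 16 GiB figures and the example paths are illustrative
# values only.
if __name__ == "__main__":
    example_config = {
        "folders": {
            "models": "./models",
            "cache": "./cache",
            "logs": "./logs",
        }
    }
    ensure_folder_structure(example_config)

    info = get_system_info()
    print(f"CPU usage: {info['cpu_percent']}%")
    print(f"GPU memory: {format_memory_size(info['gpu_memory_used'])} / "
          f"{format_memory_size(info['gpu_memory_total'])}")

    model_size = 4 * 1024 ** 3         # hypothetical 4 GiB model footprint
    available_memory = 16 * 1024 ** 3  # hypothetical 16 GiB of free memory
    # With the 1.5x overhead estimate: 16 GiB // (4 GiB * 1.5) -> batch size 2.
    print(f"Suggested batch size: {calculate_optimal_batch_size(model_size, available_memory)}")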