"""Utility helpers: system resource information, batch sizing, folder setup,
and YAML config loading."""

from pathlib import Path
from typing import Any, Dict

import psutil
import torch
import yaml


def get_system_info() -> Dict[str, Any]:
    """Get system resource information (CPU, RAM, and GPU usage)."""
    gpu_available = torch.cuda.is_available()
    return {
        "cpu_percent": psutil.cpu_percent(),
        "memory_percent": psutil.virtual_memory().percent,
        "gpu_available": gpu_available,
        # GPU figures cover device 0 only; memory_allocated() reports bytes
        # held by live tensors, not the full CUDA caching-allocator reserve.
        "gpu_memory_used": torch.cuda.memory_allocated() if gpu_available else 0,
        "gpu_memory_total": torch.cuda.get_device_properties(0).total_memory if gpu_available else 0,
    }

def calculate_optimal_batch_size(model_size: int, available_memory: int) -> int:
    """Calculate optimal batch size based on model size and available memory."""
    memory_per_sample = model_size * 1.5  # Rough estimate including overhead
    # Cast to int: multiplying by 1.5 makes this float division, so without
    # the cast the function would return a float despite the int annotation.
    return max(1, int(available_memory // memory_per_sample))

def ensure_folder_structure(config: Dict[str, Any]) -> None:
    """Ensure all folders named in the config exist, creating them if needed."""
    folders = [
        Path(config["folders"]["models"]),
        Path(config["folders"]["cache"]),
        Path(config["folders"]["logs"]),
    ]
    for folder in folders:
        folder.mkdir(parents=True, exist_ok=True)
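
# NOTE: ensure_folder_structure() above and load_config() below assume a
# config.yaml shaped roughly like this (the values are illustrative, not
# taken verbatim from the real main/resources/config.yaml):
#
#   folders:
#     models: ./models
#     cache: ./cache
#     logs: ./logs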

def format_memory_size(size_bytes: int) -> str:
    """Format a byte count as a human-readable string."""
    size = float(size_bytes)
    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
        if size < 1024:
            return f"{size:.2f}{unit}"
        size /= 1024
    # Values of 1024 TB or more fall through the loop; report them as PB
    # rather than implicitly returning None.
    return f"{size:.2f}PB"

def load_config() -> Dict[str, Any]:
    """Load configuration from the yaml file at main/resources/config.yaml."""
    with open("main/resources/config.yaml", "r") as f:
        return yaml.safe_load(f)
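

if __name__ == "__main__":
    # Minimal smoke test of the helpers above; a sketch, assuming psutil and
    # torch are installed. The model/memory figures below are hypothetical.
    info = get_system_info()
    print(f"CPU usage: {info['cpu_percent']}%")
    print(f"GPU available: {info['gpu_available']}")
    print(f"Total GPU memory: {format_memory_size(info['gpu_memory_total'])}")

    # Hypothetical sizing: a ~500 MB model with ~8 GB of memory available.
    model_size = 500 * 1024 ** 2
    available_memory = 8 * 1024 ** 3
    print(f"Suggested batch size: {calculate_optimal_batch_size(model_size, available_memory)}")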