import torch
from safetensors import safe_open
from safetensors.torch import save_file

def lazy_load_and_convert(input_file_path, output_file_path):
    # Convert on the GPU when one is available, otherwise fall back to the CPU.
    device = "cuda:0" if torch.cuda.is_available() else "cpu"

    # safe_open memory-maps the file and only materializes tensors as they are
    # requested, so the whole checkpoint is never loaded into memory at once.
    converted_tensors = {}
    with safe_open(input_file_path, framework="pt", device=device) as f:
        for key in f.keys():
            # Convert each tensor to torch.float8_e4m3fn (requires PyTorch >= 2.1).
            converted_tensors[key] = f.get_tensor(key).to(torch.float8_e4m3fn)

    # Save the converted tensors to a new safetensors file
    save_file(converted_tensors, output_file_path)
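
# Optional sanity check: a minimal sketch, assuming the output file produced by
# lazy_load_and_convert above. It re-opens the file lazily and confirms that
# every tensor was stored with the expected dtype.
def verify_dtypes(file_path, expected_dtype=torch.float8_e4m3fn):
    with safe_open(file_path, framework="pt", device="cpu") as f:
        for key in f.keys():
            tensor = f.get_tensor(key)
            assert tensor.dtype == expected_dtype, f"{key} has dtype {tensor.dtype}"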

if __name__ == "__main__":
    input_file_path = "path/to/your/input_file.safetensors"
    output_file_path = "path/to/your/output_file.safetensors"
    
    lazy_load_and_convert(input_file_path, output_file_path)