from safetensors.torch import load_file, save_file

# Load the merged checkpoint (a dict of tensors) and apply unstructured
# magnitude pruning: zero out the 25% smallest-magnitude values in each tensor.
state_dict = load_file('merged_model.safetensors')
for name, tensor in state_dict.items():
    k = int(0.25 * tensor.numel())
    if k > 0:
        cutoff = tensor.abs().float().flatten().kthvalue(k).values
        state_dict[name] = tensor.masked_fill(tensor.abs().float() <= cutoff, 0)

save_file(state_dict, 'merged_model.safetensors')
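
# Optional sanity check (illustrative sketch, assuming the same path as above):
# reload the pruned checkpoint and report the overall fraction of zeroed
# weights, which should land near the 25% pruning amount used above.
pruned = load_file('merged_model.safetensors')
zeros = sum((t == 0).sum().item() for t in pruned.values())
total = sum(t.numel() for t in pruned.values())
print(f'Overall sparsity: {zeros / total:.2%}')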