Add warning to prevent large batch sizes when device map is set to auto
translate.py CHANGED (+8 -0)
@@ -84,6 +84,14 @@ def main(
 
     accelerator = Accelerator()
 
+    if force_auto_device_map and starting_batch_size >= 64:
+        print(
+            f"WARNING: You are using a very large batch size ({starting_batch_size}) and the auto_device_map flag. "
+            f"auto_device_map will offload model parameters to the CPU when they don't fit on the GPU VRAM. "
+            f"If you use a very large batch size, it will offload a lot of parameters to the CPU and slow down the "
+            f"inference. You should consider using a smaller batch size, i.e '--starting_batch_size 8'"
+        )
+
     if precision is None:
         quantization = None
         dtype = None
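For context, the force_auto_device_map flag referenced in the warning typically corresponds to loading the model with device_map="auto", which lets Accelerate place layers on the GPU and spill the rest to CPU RAM when VRAM runs out. Below is a minimal sketch of that behavior, assuming the standard Hugging Face transformers from_pretrained API and an illustrative model name; it is not the script's actual loading code.

# Sketch: how device_map="auto" is usually enabled (illustrative, not translate.py itself).
from transformers import AutoModelForSeq2SeqLM

model = AutoModelForSeq2SeqLM.from_pretrained(
    "facebook/m2m100_418M",  # hypothetical model name, for illustration only
    device_map="auto",       # layers that do not fit in GPU VRAM are offloaded to the CPU
)

With layers offloaded this way, each forward pass moves activations between CPU and GPU, so very large batches amplify the transfer overhead, which is why the commit suggests a smaller starting batch size.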