Spaces:
Runtime error
update gpu ram size for t4
config.py
CHANGED
@@ -6,7 +6,7 @@ quantized = {
     "runtimedtype": torch.bfloat16,
     "useGPU": torch.cuda.is_available(),
     "chunksize": 32, # larger = more accurate, but more memory (and slower)
-    "target":
+    "target": 15 # your gpu max size, excess vram offloaded to cpu
 }
 
 # UNCOMMENT TO SELECT OPTIONS
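For context, a minimal sketch of the quantized options block in config.py as it stands after this commit. Only the keys visible in the diff are shown; the import and any other options elsewhere in the file are assumptions. The target value of 15 is the GPU memory budget (presumably in GB) for an NVIDIA T4, whose card has 16 GB of VRAM, and per the diff comment anything beyond that budget is offloaded to the CPU.

import torch

# Sketch of the quantized-model options after this commit; keys other than
# those shown in the diff are omitted rather than guessed.
quantized = {
    "runtimedtype": torch.bfloat16,       # compute dtype at runtime
    "useGPU": torch.cuda.is_available(),  # use the GPU only when one is present
    "chunksize": 32,  # larger = more accurate, but more memory (and slower)
    "target": 15,     # your gpu max size, excess vram offloaded to cpu
}

Setting target slightly below the card's physical 16 GB presumably leaves headroom for activations and the CUDA context rather than budgeting the entire card for weights.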