lhpku20010120 committed (verified)
Commit 672b9f6 · 1 Parent(s): 1d08196

Upload busy.py

Files changed (1): busy.py (+34, -0)
busy.py ADDED
import torch
import time
from multiprocessing import Pool, set_start_method


def run_on_single_gpu(device):
    # Allocate two matrices on the given GPU and loop over element-wise and
    # matrix ops forever to keep that device busy.
    a = torch.randn(1000, 1000).cuda(device)
    b = torch.randn(1000, 1000).cuda(device)
    ta = a
    tb = b
    while True:
        # Reset a and b to the initial random matrices at the start of each
        # iteration, then run a chain of kernels on both tensors.
        a = ta
        b = tb
        a = torch.sin(a)
        b = torch.sin(b)
        a = torch.cos(a)
        b = torch.cos(b)
        a = torch.tan(a)
        b = torch.tan(b)
        a = torch.exp(a)
        b = torch.exp(b)
        a = torch.log(a)
        b = torch.log(b)
        b = torch.matmul(a, b)
        # time.sleep(0.000005)  # optional throttle to lower utilization


if __name__ == '__main__':
    # CUDA requires the 'spawn' start method for worker processes.
    set_start_method('spawn')
    print('start running')
    num_gpus = torch.cuda.device_count()
    # One worker process per visible GPU; each worker loops forever,
    # so map() only returns if the processes are killed.
    pool = Pool(processes=num_gpus)
    pool.map(run_on_single_gpu, range(num_gpus))
    pool.close()
    pool.join()
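
busy.py starts one worker process per visible CUDA device and keeps each GPU at high utilization with an endless chain of element-wise and matmul ops until the processes are killed. As a minimal sketch only (not part of this commit), the same worker function could also be driven by torch.multiprocessing.spawn, which passes each child its process index and uses the 'spawn' start method by default; the import of busy.run_on_single_gpu assumes busy.py sits on the Python path.

# launch_busy.py -- hypothetical helper, not included in the commit
import torch
import torch.multiprocessing as mp

from busy import run_on_single_gpu  # assumes busy.py is importable

if __name__ == '__main__':
    num_gpus = torch.cuda.device_count()
    # spawn() calls run_on_single_gpu(i) for each i in range(num_gpus);
    # join=True blocks indefinitely because every worker runs an infinite loop.
    mp.spawn(run_on_single_gpu, nprocs=num_gpus, join=True)

Compared with the Pool-based launcher in busy.py, mp.spawn removes the explicit set_start_method call and tears down the remaining workers if any child process exits with an error.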