{
    "name": "cuda_training_transformers_text-classification_FacebookAI/roberta-base",
    "backend": {
        "name": "pytorch",
        "version": "2.4.1+cu124",
        "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
        "task": "text-classification",
        "library": "transformers",
        "model_type": "roberta",
        "model": "FacebookAI/roberta-base",
        "processor": "FacebookAI/roberta-base",
        "device": "cuda",
        "device_ids": "0",
        "seed": 42,
        "inter_op_num_threads": null,
        "intra_op_num_threads": null,
        "model_kwargs": {},
        "processor_kwargs": {},
        "no_weights": true,
        "device_map": null,
        "torch_dtype": null,
        "eval_mode": true,
        "to_bettertransformer": false,
        "low_cpu_mem_usage": null,
        "attn_implementation": null,
        "cache_implementation": null,
        "autocast_enabled": false,
        "autocast_dtype": null,
        "torch_compile": false,
        "torch_compile_target": "forward",
        "torch_compile_config": {},
        "quantization_scheme": null,
        "quantization_config": {},
        "deepspeed_inference": false,
        "deepspeed_inference_config": {},
        "peft_type": null,
        "peft_config": {}
    },
    "scenario": {
        "name": "training",
        "_target_": "optimum_benchmark.scenarios.training.scenario.TrainingScenario",
        "max_steps": 5,
        "warmup_steps": 2,
        "dataset_shapes": {
            "dataset_size": 500,
            "sequence_length": 16,
            "num_choices": 1
        },
        "training_arguments": {
            "per_device_train_batch_size": 2,
            "gradient_accumulation_steps": 1,
            "output_dir": "./trainer_output",
            "evaluation_strategy": "no",
            "eval_strategy": "no",
            "save_strategy": "no",
            "do_train": true,
            "use_cpu": false,
            "max_steps": 5,
            "do_eval": false,
            "do_predict": false,
            "report_to": "none",
            "skip_memory_metrics": true,
            "ddp_find_unused_parameters": false
        },
        "latency": true,
        "memory": true,
        "energy": true
    },
    "launcher": {
        "name": "process",
        "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
        "device_isolation": true,
        "device_isolation_action": "error",
        "numactl": false,
        "numactl_kwargs": {},
        "start_method": "spawn"
    },
    "environment": {
        "cpu": " AMD EPYC 7R32",
        "cpu_count": 16,
        "cpu_ram_mb": 66697.261056,
        "system": "Linux",
        "machine": "x86_64",
        "platform": "Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35",
        "processor": "x86_64",
        "python_version": "3.10.12",
        "gpu": [
            "NVIDIA A10G"
        ],
        "gpu_count": 1,
        "gpu_vram_mb": 24146608128,
        "optimum_benchmark_version": "0.5.0",
        "optimum_benchmark_commit": null,
        "transformers_version": "4.44.2",
        "transformers_commit": null,
        "accelerate_version": "0.34.2",
        "accelerate_commit": null,
        "diffusers_version": "0.30.3",
        "diffusers_commit": null,
        "optimum_version": "1.22.0",
        "optimum_commit": null,
        "timm_version": "1.0.9",
        "timm_commit": null,
        "peft_version": "0.13.0",
        "peft_commit": null
    }
}