{
    "name": "cuda_training_transformers_text-generation_openai-community/gpt2",
    "backend": {
        "name": "pytorch",
        "version": "2.4.0+cu124",
        "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
        "task": "text-generation",
        "library": "transformers",
        "model_type": "gpt2",
        "model": "openai-community/gpt2",
        "processor": "openai-community/gpt2",
        "device": "cuda",
        "device_ids": "0",
        "seed": 42,
        "inter_op_num_threads": null,
        "intra_op_num_threads": null,
        "model_kwargs": {},
        "processor_kwargs": {},
        "no_weights": true,
        "device_map": null,
        "torch_dtype": null,
        "eval_mode": true,
        "to_bettertransformer": false,
        "low_cpu_mem_usage": null,
        "attn_implementation": null,
        "cache_implementation": null,
        "autocast_enabled": false,
        "autocast_dtype": null,
        "torch_compile": false,
        "torch_compile_target": "forward",
        "torch_compile_config": {},
        "quantization_scheme": null,
        "quantization_config": {},
        "deepspeed_inference": false,
        "deepspeed_inference_config": {},
        "peft_type": null,
        "peft_config": {}
    },
    "scenario": {
        "name": "training",
        "_target_": "optimum_benchmark.scenarios.training.scenario.TrainingScenario",
        "max_steps": 5,
        "warmup_steps": 2,
        "dataset_shapes": {
            "dataset_size": 500,
            "sequence_length": 16,
            "num_choices": 1
        },
        "training_arguments": {
            "per_device_train_batch_size": 2,
            "gradient_accumulation_steps": 1,
            "output_dir": "./trainer_output",
            "evaluation_strategy": "no",
            "eval_strategy": "no",
            "save_strategy": "no",
            "do_train": true,
            "use_cpu": false,
            "max_steps": 5,
            "do_eval": false,
            "do_predict": false,
            "report_to": "none",
            "skip_memory_metrics": true,
            "ddp_find_unused_parameters": false
        },
        "latency": true,
        "memory": true,
        "energy": false
    },
    "launcher": {
        "name": "process",
        "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
        "device_isolation": true,
        "device_isolation_action": "error",
        "numactl": false,
        "numactl_kwargs": {},
        "start_method": "spawn"
    },
    "environment": {
        "cpu": " AMD EPYC 7R32",
        "cpu_count": 16,
        "cpu_ram_mb": 66697.25696,
        "system": "Linux",
        "machine": "x86_64",
        "platform": "Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35",
        "processor": "x86_64",
        "python_version": "3.10.12",
        "gpu": [
            "NVIDIA A10G"
        ],
        "gpu_count": 1,
        "gpu_vram_mb": 24146608128,
        "optimum_benchmark_version": "0.4.0",
        "optimum_benchmark_commit": null,
        "transformers_version": "4.44.0",
        "transformers_commit": null,
        "accelerate_version": "0.33.0",
        "accelerate_commit": null,
        "diffusers_version": "0.30.0",
        "diffusers_commit": null,
        "optimum_version": null,
        "optimum_commit": null,
        "timm_version": "1.0.8",
        "timm_commit": null,
        "peft_version": null,
        "peft_commit": null
    }
}