{
    "name": "cuda_training_transformers_multiple-choice_FacebookAI/roberta-base",
    "backend": {
        "name": "pytorch",
        "version": "2.4.1+cu124",
        "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
        "task": "multiple-choice",
        "library": "transformers",
        "model_type": "roberta",
        "model": "FacebookAI/roberta-base",
        "processor": "FacebookAI/roberta-base",
        "device": "cuda",
        "device_ids": "0",
        "seed": 42,
        "inter_op_num_threads": null,
        "intra_op_num_threads": null,
        "model_kwargs": {},
        "processor_kwargs": {},
        "no_weights": true,
        "device_map": null,
        "torch_dtype": null,
        "eval_mode": true,
        "to_bettertransformer": false,
        "low_cpu_mem_usage": null,
        "attn_implementation": null,
        "cache_implementation": null,
        "autocast_enabled": false,
        "autocast_dtype": null,
        "torch_compile": false,
        "torch_compile_target": "forward",
        "torch_compile_config": {},
        "quantization_scheme": null,
        "quantization_config": {},
        "deepspeed_inference": false,
        "deepspeed_inference_config": {},
        "peft_type": null,
        "peft_config": {}
    },
    "scenario": {
        "name": "training",
        "_target_": "optimum_benchmark.scenarios.training.scenario.TrainingScenario",
        "max_steps": 5,
        "warmup_steps": 2,
        "dataset_shapes": {
            "dataset_size": 500,
            "sequence_length": 16,
            "num_choices": 1
        },
        "training_arguments": {
            "per_device_train_batch_size": 2,
            "gradient_accumulation_steps": 1,
            "output_dir": "./trainer_output",
            "evaluation_strategy": "no",
            "eval_strategy": "no",
            "save_strategy": "no",
            "do_train": true,
            "use_cpu": false,
            "max_steps": 5,
            "do_eval": false,
            "do_predict": false,
            "report_to": "none",
            "skip_memory_metrics": true,
            "ddp_find_unused_parameters": false
        },
        "latency": true,
        "memory": true,
        "energy": true
    },
    "launcher": {
        "name": "process",
        "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
        "device_isolation": true,
        "device_isolation_action": "error",
        "numactl": false,
        "numactl_kwargs": {},
        "start_method": "spawn"
    },
    "environment": {
        "cpu": " AMD EPYC 7R32",
        "cpu_count": 16,
        "cpu_ram_mb": 66697.261056,
        "system": "Linux",
        "machine": "x86_64",
        "platform": "Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35",
        "processor": "x86_64",
        "python_version": "3.10.12",
        "gpu": [
            "NVIDIA A10G"
        ],
        "gpu_count": 1,
        "gpu_vram_mb": 24146608128,
        "optimum_benchmark_version": "0.5.0",
        "optimum_benchmark_commit": null,
        "transformers_version": "4.45.1",
        "transformers_commit": null,
        "accelerate_version": "0.34.2",
        "accelerate_commit": null,
        "diffusers_version": "0.30.3",
        "diffusers_commit": null,
        "optimum_version": null,
        "optimum_commit": null,
        "timm_version": "1.0.9",
        "timm_commit": null,
        "peft_version": "0.13.0",
        "peft_commit": null
    }
}
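
The JSON above is the serialized BenchmarkConfig for this run. Below is a minimal sketch of how an equivalent configuration could be built and launched programmatically; it assumes the top-level PyTorchConfig, TrainingConfig, ProcessConfig, BenchmarkConfig and Benchmark classes exported by optimum-benchmark 0.5.0, and it only sets the fields that differ from the library defaults shown above. Exact constructor signatures should be checked against that release; this is not the script that produced this artifact.

# Sketch: building an equivalent benchmark config with the optimum-benchmark
# Python API (assumed to match optimum-benchmark 0.5.0).
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    ProcessConfig,
    PyTorchConfig,
    TrainingConfig,
)

if __name__ == "__main__":
    # PyTorch backend on CUDA device 0, benchmarking a randomly initialized
    # model (no_weights=True), matching the "backend" block above.
    backend_config = PyTorchConfig(
        model="FacebookAI/roberta-base",
        task="multiple-choice",
        device="cuda",
        device_ids="0",
        no_weights=True,
    )

    # Training scenario: 5 steps with 2 warmup steps on a synthetic dataset,
    # tracking latency, memory and energy, matching the "scenario" block.
    scenario_config = TrainingConfig(
        max_steps=5,
        warmup_steps=2,
        dataset_shapes={"dataset_size": 500, "sequence_length": 16, "num_choices": 1},
        training_arguments={
            "per_device_train_batch_size": 2,
            "gradient_accumulation_steps": 1,
        },
        latency=True,
        memory=True,
        energy=True,
    )

    # Spawned-process launcher with GPU isolation, matching the "launcher" block.
    launcher_config = ProcessConfig(
        device_isolation=True,
        device_isolation_action="error",
    )

    benchmark_config = BenchmarkConfig(
        name="cuda_training_transformers_multiple-choice_FacebookAI/roberta-base",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )

    benchmark_report = Benchmark.launch(benchmark_config)
    benchmark_report.log()

The "environment" block is collected by the benchmark itself at launch time, so it does not appear in the configuration code.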