IlyasMoutawwakil committed (verified)
Commit 2d44cd7 · Parent(s): b18b6c9

Upload cuda_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel/benchmark.json with huggingface_hub

cuda_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel/benchmark.json ADDED
@@ -0,0 +1,605 @@
{
    "config": {
        "name": "cuda_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel",
        "backend": {
            "name": "pytorch",
            "version": "2.5.1",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "model": "hf-internal-testing/tiny-random-BertModel",
            "processor": "hf-internal-testing/tiny-random-BertModel",
            "task": "fill-mask",
            "library": "transformers",
            "model_type": "bert",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "tp_plan": null,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "allow_tf32": false,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 2,
                "sequence_length": 16,
                "num_choices": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": true,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7742 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 540671.643648,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.4.0-166-generic-x86_64-with-glibc2.31",
            "processor": "x86_64",
            "python_version": "3.10.18",
            "gpu": [
                "NVIDIA A100-SXM4-80GB",
                "NVIDIA A100-SXM4-80GB",
                "NVIDIA A100-SXM4-80GB",
                "NVIDIA DGX Display",
                "NVIDIA A100-SXM4-80GB"
            ],
            "gpu_count": 5,
            "gpu_vram_mb": 347892350976,
            "optimum_benchmark_version": "0.6.0.dev0",
            "optimum_benchmark_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
            "transformers_version": "4.45.1",
            "transformers_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
            "accelerate_version": "1.10.0",
            "accelerate_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
            "diffusers_version": "0.34.0",
            "diffusers_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.19",
            "timm_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
            "peft_version": "0.17.0",
            "peft_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db"
        },
        "print_report": true,
        "log_report": true
    },
    "report": {
        "load_model": {
            "memory": {
                "unit": "MB",
                "max_ram": 811.282432,
                "max_global_vram": 1363.083264,
                "max_process_vram": 436.207616,
                "max_reserved": 2.097152,
                "max_allocated": 0.389632
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.1634396209716797
                ],
                "count": 1,
                "total": 0.1634396209716797,
                "mean": 0.1634396209716797,
                "p50": 0.1634396209716797,
                "p90": 0.1634396209716797,
                "p95": 0.1634396209716797,
                "p99": 0.1634396209716797,
                "stdev": 0,
                "stdev_": 0
            },
            "throughput": null,
            "energy": {
                "unit": "kWh",
                "cpu": 0.00030914322155877014,
                "ram": 0.0005188749783373466,
                "gpu": 0.00021641100645553024,
                "total": 0.001044429206351647
            },
            "efficiency": null
        },
        "first_forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1006.05952,
                "max_global_vram": 1474.23232,
                "max_process_vram": 547.356672,
                "max_reserved": 23.068672,
                "max_allocated": 10.112
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.5644544067382813
                ],
                "count": 1,
                "total": 0.5644544067382813,
                "mean": 0.5644544067382813,
                "p50": 0.5644544067382813,
                "p90": 0.5644544067382813,
                "p95": 0.5644544067382813,
                "p99": 0.5644544067382813,
                "stdev": 0,
                "stdev_": 0
            },
            "throughput": null,
            "energy": {
                "unit": "kWh",
                "cpu": 0.0003217162053479114,
                "ram": 0.0005399784620465254,
                "gpu": 0.0002210579546257918,
                "total": 0.001082752622020229
            },
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1007.47264,
                "max_global_vram": 1474.23232,
                "max_process_vram": 547.356672,
                "max_reserved": 23.068672,
                "max_allocated": 10.112
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.002642944097518921,
                    0.0025815041065216063,
                    0.0027904000282287598,
                    0.0026204159259796142,
                    0.0026224639415740967,
                    0.0025784320831298826,
                    0.002577408075332642,
                    0.002693120002746582,
                    0.0026521599292755127,
                    0.002702336072921753,
                    0.0026265599727630616,
                    0.0026603519916534426,
                    0.0025753600597381594,
                    0.0026316800117492677,
                    0.0026603519916534426,
                    0.002638848066329956,
                    0.0026204159259796142,
                    0.0028887040615081785,
                    0.0027207679748535154,
                    0.00265830397605896,
                    0.0026112000942230223,
                    0.0026368000507354735,
                    0.00263372802734375,
                    0.0026675200462341307,
                    0.0027688961029052736,
                    0.0026449921131134033,
                    0.0025681920051574708,
                    0.0025036799907684325,
                    0.00253439998626709,
                    0.0025231359004974367,
                    0.0025528318881988523,
                    0.002540544033050537,
                    0.0028354558944702147,
                    0.002687999963760376,
                    0.0026316800117492677,
                    0.0027002880573272704,
                    0.002663424015045166,
                    0.002664448022842407,
                    0.0026296319961547853,
                    0.002698240041732788,
                    0.00263372802734375,
                    0.0026214399337768555,
                    0.0025518081188201903,
                    0.0026112000942230223,
                    0.002632704019546509,
                    0.002548736095428467,
                    0.002549760103225708,
                    0.002698240041732788,
                    0.0026675200462341307,
                    0.002550784111022949,
                    0.0026849279403686525,
                    0.00253439998626709,
                    0.002540544033050537,
                    0.002548736095428467,
                    0.002657279968261719,
                    0.0028129279613494872,
                    0.002654207944869995,
                    0.002584575891494751,
                    0.0025671679973602295,
                    0.002643968105316162,
                    0.0026552319526672364,
                    0.0026122241020202636,
                    0.0028282880783081056,
                    0.0027555840015411376,
                    0.0026951680183410643,
                    0.002664448022842407,
                    0.002569216012954712,
                    0.0026112000942230223,
                    0.0025681920051574708,
                    0.0025241599082946775,
                    0.002769920110702515,
                    0.002597887992858887,
                    0.002579456090927124,
                    0.0024872961044311525,
                    0.002530303955078125,
                    0.0025333759784698486,
                    0.0026920959949493406,
                    0.0027432959079742433,
                    0.0027146239280700685,
                    0.002711551904678345,
                    0.002678783893585205,
                    0.0026122241020202636,
                    0.0026808319091796875,
                    0.0027156479358673097,
                    0.002753535985946655,
                    0.002811903953552246,
                    0.002687999963760376,
                    0.0028569600582122805,
                    0.0026951680183410643,
                    0.0027197439670562746,
                    0.002654207944869995,
                    0.0025712640285491944,
                    0.0026757121086120605,
                    0.002732032060623169,
                    0.002657279968261719,
                    0.0026460158824920654,
                    0.002672640085220337,
                    0.0026419200897216796,
                    0.0026122241020202636,
                    0.0026460158824920654,
                    0.00276582407951355,
                    0.0027217919826507567,
                    0.002686975955963135,
                    0.002661375999450684,
                    0.002604032039642334,
                    0.002603008031845093,
                    0.002535423994064331,
                    0.002632704019546509,
                    0.002668544054031372,
                    0.0026408960819244384,
                    0.0026081280708312986,
                    0.002673664093017578,
                    0.002672640085220337,
                    0.002639872074127197,
                    0.002632704019546509,
                    0.0026972160339355468,
                    0.002648063898086548,
                    0.0025681920051574708,
                    0.0026009600162506105,
                    0.0025866239070892333,
                    0.002642944097518921,
                    0.0026122241020202636,
                    0.002770944118499756,
                    0.0026460158824920654,
                    0.002543616056442261,
                    0.002554879903793335,
                    0.0025784320831298826,
                    0.002573312044143677,
                    0.0025579519271850585,
                    0.0026122241020202636,
                    0.0026859519481658937,
                    0.002712575912475586,
                    0.00268287992477417,
                    0.00263372802734375,
                    0.0027678720951080323,
                    0.002737152099609375,
                    0.0026849279403686525,
                    0.002756608009338379,
                    0.0027443199157714845,
                    0.0027985920906066896,
                    0.0027310080528259277,
                    0.002693120002746582,
                    0.00268287992477417,
                    0.002597887992858887,
                    0.0026019840240478517,
                    0.002634752035140991,
                    0.0025384960174560547,
                    0.0025528318881988523,
                    0.0025159680843353273,
                    0.002594815969467163,
                    0.002489343881607056,
                    0.0024453120231628417,
                    0.0025159680843353273,
                    0.0025518081188201903,
                    0.002465791940689087,
                    0.002460671901702881,
                    0.002476032018661499,
                    0.0024401919841766356,
                    0.0025159680843353273,
                    0.0024494080543518066,
                    0.0025620479583740234,
                    0.002574336051940918,
                    0.002494463920593262,
                    0.0024688639640808104,
                    0.002467839956283569,
                    0.0024995839595794676,
                    0.0024924159049987795,
                    0.002514944076538086,
                    0.0025251839160919188,
                    0.0025763840675354006,
                    0.002530303955078125,
                    0.0025169920921325685,
                    0.002539520025253296,
                    0.0025180160999298097,
                    0.0025815041065216063,
                    0.0024954879283905027,
                    0.0025681920051574708,
                    0.002590719938278198,
                    0.0025128960609436036,
                    0.002480128049850464,
                    0.0025088000297546387,
                    0.0024954879283905027,
                    0.0025333759784698486,
                    0.002548736095428467,
                    0.0025384960174560547,
                    0.002544640064239502,
                    0.0025231359004974367,
                    0.002500607967376709,
                    0.002514944076538086,
                    0.0025088000297546387,
                    0.0025036799907684325,
                    0.0025118720531463624,
                    0.002569216012954712,
                    0.002548736095428467,
                    0.0025722880363464357,
                    0.0024872961044311525,
                    0.002505728006362915,
                    0.0025569279193878173,
                    0.0025323519706726074,
                    0.002543616056442261,
                    0.002597887992858887,
                    0.002577408075332642,
                    0.0025425920486450196,
                    0.0025088000297546387,
                    0.0024985599517822264,
                    0.002513920068740845,
                    0.0025067520141601562,
                    0.002573312044143677,
                    0.002590719938278198,
                    0.0025466880798339846,
                    0.0025231359004974367,
                    0.0025118720531463624,
                    0.002543616056442261,
                    0.002561023950576782,
                    0.0024791040420532227,
                    0.002573312044143677,
                    0.002594815969467163,
                    0.002584575891494751,
                    0.0025231359004974367,
                    0.0025118720531463624,
                    0.0025159680843353273,
                    0.002604032039642334,
                    0.00263372802734375,
                    0.002549760103225708,
                    0.002653183937072754,
                    0.002555903911590576,
                    0.002594815969467163,
                    0.002500607967376709,
                    0.0024862720966339113,
                    0.0025927679538726807,
                    0.002543616056442261,
                    0.0025999360084533693,
                    0.0025630719661712646,
                    0.002573312044143677,
                    0.002632704019546509,
                    0.0025077760219573975,
                    0.0025968639850616456,
                    0.0025292799472808837,
                    0.0025528318881988523,
                    0.002613248109817505,
                    0.002561023950576782,
                    0.002540544033050537,
                    0.002548736095428467,
                    0.0025569279193878173,
                    0.0024924159049987795,
                    0.0025835518836975096,
                    0.00253439998626709,
                    0.0025712640285491944,
                    0.0025128960609436036,
                    0.0025589759349822997,
                    0.0025876479148864745,
                    0.002514944076538086,
                    0.002539520025253296,
                    0.0025825281143188475,
                    0.002519040107727051,
                    0.002574336051940918,
                    0.0024821760654449463,
                    0.002514944076538086,
                    0.00248524808883667,
                    0.0024729599952697753,
                    0.0024913918972015382,
                    0.00250982403755188,
                    0.0024832000732421876,
                    0.002548736095428467,
                    0.0025538558959960936,
                    0.002510848045349121,
                    0.002470911979675293,
                    0.002489343881607056,
                    0.00250163197517395,
                    0.0025364480018615723,
                    0.002471935987472534,
                    0.002580480098724365,
                    0.002513920068740845,
                    0.002481152057647705,
                    0.002531327962875366,
                    0.0025128960609436036,
                    0.0024821760654449463,
                    0.0025374720096588135,
                    0.002497535943984985,
                    0.0025784320831298826,
                    0.002569216012954712,
                    0.002470911979675293,
                    0.002481152057647705,
                    0.0025047039985656738,
                    0.0025681920051574708,
                    0.0025968639850616456,
                    0.0025456640720367433,
                    0.0025538558959960936,
                    0.002603008031845093,
                    0.002527231931686401,
                    0.0025047039985656738,
                    0.0024985599517822264,
                    0.002554879903793335,
                    0.002510848045349121,
                    0.0025589759349822997,
                    0.002496511936187744,
                    0.002513920068740845,
                    0.0025815041065216063,
                    0.0025210878849029543,
                    0.002573312044143677,
                    0.0025671679973602295,
                    0.0025374720096588135,
                    0.0025815041065216063,
                    0.002573312044143677,
                    0.00250163197517395,
                    0.0025958399772644043,
                    0.0025374720096588135,
                    0.002497535943984985,
                    0.002540544033050537,
                    0.002466815948486328,
                    0.002573312044143677,
                    0.002634752035140991,
                    0.0024821760654449463,
                    0.0025036799907684325,
                    0.0025221118927001955,
                    0.0024627199172973634,
                    0.002471935987472534,
                    0.0025210878849029543,
                    0.002642944097518921,
                    0.0024862720966339113,
                    0.002471935987472534,
                    0.002510848045349121,
                    0.0025169920921325685,
                    0.002451456069946289,
                    0.002451456069946289,
                    0.0025323519706726074,
                    0.002548736095428467,
                    0.0025047039985656738,
                    0.0024750080108642577,
                    0.002514944076538086,
                    0.0025528318881988523,
                    0.0024442880153656005,
                    0.0024576001167297364,
                    0.0024729599952697753,
                    0.0025681920051574708,
                    0.0024576001167297364,
                    0.0024739840030670165,
                    0.0025128960609436036,
                    0.002519040107727051,
                    0.0024688639640808104,
                    0.0025241599082946775,
                    0.0024627199172973634,
                    0.0025671679973602295,
                    0.0024995839595794676,
                    0.002484224081039429,
                    0.0025088000297546387,
                    0.002455552101135254,
                    0.0024637439250946047,
                    0.00253439998626709,
                    0.0024729599952697753,
                    0.0025917439460754394,
                    0.002514944076538086,
                    0.0024371199607849123,
                    0.002500607967376709,
                    0.002550784111022949,
                    0.0024750080108642577,
                    0.0025231359004974367,
                    0.0024821760654449463,
                    0.002555903911590576,
                    0.0024954879283905027,
                    0.0025159680843353273,
                    0.002461695909500122,
                    0.0025077760219573975,
                    0.0024279038906097413,
                    0.0024115200042724608,
                    0.002484224081039429,
                    0.002550784111022949,
                    0.0024995839595794676,
                    0.0025047039985656738,
                    0.0024524800777435303,
                    0.0024883201122283937,
                    0.002421760082244873,
                    0.002406399965286255,
                    0.002480128049850464,
                    0.0025210878849029543,
                    0.002494463920593262,
                    0.002421760082244873,
                    0.002441215991973877,
                    0.002470911979675293,
                    0.002484224081039429,
                    0.00252620792388916,
                    0.002471935987472534,
                    0.0025661439895629883,
                    0.0024780800342559814,
                    0.002496511936187744
                ],
                "count": 385,
                "total": 0.9895669796466826,
                "mean": 0.002570303843238137,
                "p50": 0.0025528318881988523,
                "p90": 0.002687999963760376,
                "p95": 0.0027318272590637206,
                "p99": 0.0028153855800628655,
                "stdev": 8.564019716831984e-05,
                "stdev_": 3.3319094702993572
            },
            "throughput": {
                "unit": "samples/s",
                "value": 778.1181222062629
            },
            "energy": {
                "unit": "kWh",
                "cpu": 7.926489950800206e-08,
                "ram": 1.3301133581781163e-07,
                "gpu": 5.085165812373287e-08,
                "total": 2.631278934495463e-07
            },
            "efficiency": {
                "unit": "samples/kWh",
                "value": 7600866.535966442
            }
        }
    }
}
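
For context, below is a minimal sketch of how an equivalent run could be reproduced with the optimum-benchmark Python API. Class names are inferred from the _target_ entries in the config above; exact constructor arguments may differ between library versions, so treat this as an illustration rather than the canonical launch script.

from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)
from optimum_benchmark.logging_utils import setup_logging

if __name__ == "__main__":
    setup_logging(level="INFO")

    # Backend: PyTorch on CUDA device 0, materializing random weights ("no_weights")
    # instead of downloading a checkpoint, as in the config above.
    backend_config = PyTorchConfig(
        model="hf-internal-testing/tiny-random-BertModel",
        device="cuda",
        device_ids="0",
        no_weights=True,
    )

    # Scenario: inference with latency, memory and energy tracking on 2x16 inputs.
    scenario_config = InferenceConfig(
        memory=True,
        latency=True,
        energy=True,
        input_shapes={"batch_size": 2, "sequence_length": 16},
    )

    # Launcher: spawned, device-isolated process that errors out if another
    # process touches the benchmarked GPU.
    launcher_config = ProcessConfig(
        device_isolation=True,
        device_isolation_action="error",
        start_method="spawn",
    )

    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )

    # Launching produces a report with the load_model / first_forward / forward
    # sections seen in the JSON above.
    benchmark_report = Benchmark.launch(benchmark_config)
    print(benchmark_report)

And a small stand-alone snippet (a hypothetical helper, not part of this repository) showing how the summary statistics of the forward section can be re-derived from the raw per-call latencies stored in this file. Note that the reported throughput is in samples per second with batch_size=2, i.e. 2 / mean latency.

import json
import statistics

# Load only the forward-pass latency block from the benchmark report above.
with open("benchmark.json") as f:
    forward_latency = json.load(f)["report"]["forward"]["latency"]

values = forward_latency["values"]  # per-call latencies in seconds
mean_s = statistics.mean(values)

print(f"count:      {len(values)}")                       # 385 calls
print(f"mean:       {mean_s * 1e3:.3f} ms per forward")    # ~2.570 ms
print(f"p50:        {statistics.median(values) * 1e3:.3f} ms")
print(f"throughput: {2 / mean_s:.1f} samples/s")           # ~778 samples/s (batch_size=2)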