Upload cpu_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
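As the commit message notes, the file was pushed with the huggingface_hub client. A minimal sketch of how such an upload is typically done is shown below; the repo_id and local path are illustrative placeholders, since the destination repository is not recorded in this file.

from huggingface_hub import HfApi

api = HfApi()

# Hypothetical repo_id and local path, shown only to illustrate the upload call;
# the actual destination repository is not part of this file.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<benchmark-dataset>",
    repo_type="dataset",
    commit_message="Upload benchmark.json with huggingface_hub",
)

The uploaded benchmark.json follows: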
{
    "config": {
        "name": "cpu_inference_transformers_token-classification_microsoft/deberta-v3-base",
        "backend": {
            "name": "pytorch",
            "version": "2.5.1+cpu",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "token-classification",
            "library": "transformers",
            "model_type": "deberta-v2",
            "model": "microsoft/deberta-v3-base",
            "processor": "microsoft/deberta-v3-base",
            "device": "cpu",
            "device_ids": null,
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 2,
                "sequence_length": 16,
                "num_choices": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": true,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": false,
            "device_isolation_action": null,
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 4,
            "cpu_ram_mb": 16766.779392,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-6.8.0-1017-azure-x86_64-with-glibc2.39",
            "processor": "x86_64",
            "python_version": "3.10.15",
            "optimum_benchmark_version": "0.5.0.dev0",
            "optimum_benchmark_commit": "12b33198089c125596a4363f88200dc72e18e532",
            "transformers_version": "4.47.0",
            "transformers_commit": null,
            "accelerate_version": "1.2.0",
            "accelerate_commit": null,
            "diffusers_version": "0.31.0",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.12",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        },
        "print_report": true,
        "log_report": true
    },
    "report": {
        "load_model": {
            "memory": {
                "unit": "MB",
                "max_ram": 1183.383552,
                "max_global_vram": null,
                "max_process_vram": null,
                "max_reserved": null,
                "max_allocated": null
            },
            "latency": {
                "unit": "s",
                "values": [
                    1.7768922690000295
                ],
                "count": 1,
                "total": 1.7768922690000295,
                "mean": 1.7768922690000295,
                "p50": 1.7768922690000295,
                "p90": 1.7768922690000295,
                "p95": 1.7768922690000295,
                "p99": 1.7768922690000295,
                "stdev": 0,
                "stdev_": 0
            },
            "throughput": null,
            "energy": {
                "unit": "kWh",
                "cpu": 0.00020320646534999868,
                "ram": 8.498917798985873e-06,
                "gpu": 0,
                "total": 0.00021170538314898457
            },
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1220.489216,
                "max_global_vram": null,
                "max_process_vram": null,
                "max_reserved": null,
                "max_allocated": null
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.3311455580000029,
                    0.22746716300002845,
                    0.18423116399998207,
                    0.1823831610000184,
                    0.19049313300001813
                ],
                "count": 5,
                "total": 1.11572017900005,
                "mean": 0.22314403580001,
                "p50": 0.19049313300001813,
                "p90": 0.28967420000001315,
                "p95": 0.310409879000008,
                "p99": 0.3269984222000039,
                "stdev": 0.056435379304375394,
                "stdev_": 25.291009505158762
            },
            "throughput": {
                "unit": "samples/s",
                "value": 8.962820775512345
            },
            "energy": {
                "unit": "kWh",
                "cpu": 8.377803101110873e-06,
                "ram": 3.503055281385345e-07,
                "gpu": 0.0,
                "total": 8.728108629249413e-06
            },
            "efficiency": {
                "unit": "samples/kWh",
                "value": 229144.71908583396
            }
        }
    }
}
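For reference, a comparable run can be configured programmatically with the optimum-benchmark Python API. The sketch below mirrors the "backend", "scenario", and "launcher" sections of the config above; class and argument names follow the optimum-benchmark README, but exact signatures may differ between versions (this report was produced with 0.5.0.dev0), so treat it as an approximation rather than the exact script behind this file.

from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)
from optimum_benchmark.logging_utils import setup_logging

setup_logging(level="INFO")

if __name__ == "__main__":
    # Backend: PyTorch on CPU with randomly initialized ("no_weights") weights
    # in eval mode, matching the "backend" section above.
    backend_config = PyTorchConfig(
        model="microsoft/deberta-v3-base",
        task="token-classification",
        device="cpu",
        no_weights=True,
    )
    # Scenario: track memory, latency, and energy on small synthetic inputs,
    # matching the "scenario" section above.
    scenario_config = InferenceConfig(
        memory=True,
        latency=True,
        energy=True,
        input_shapes={"batch_size": 2, "sequence_length": 16},
    )
    # Launcher: run the benchmark in a spawned subprocess, matching the
    # "launcher" section above.
    launcher_config = ProcessConfig(start_method="spawn")

    benchmark_config = BenchmarkConfig(
        name="cpu_inference_transformers_token-classification_microsoft/deberta-v3-base",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )
    benchmark_report = Benchmark.launch(benchmark_config)

    # Serializing both artifacts yields JSON in the same shape as the
    # "config" and "report" sections of this file.
    benchmark_config.save_json("benchmark_config.json")
    benchmark_report.save_json("benchmark_report.json")

The resulting report, like the one above, contains one entry per measured stage (load_model and forward here), with peak memory in MB, per-iteration latency in seconds, throughput in samples/s, and energy in kWh.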