{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 54.22916030883789,
"base_token_generation_latency_async": 54.13356442004442,
"base_token_generation_throughput_sync": 0.018440263398971105,
"base_token_generation_throughput_async": 0.018472827546336907,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 52.29578285217285,
"base_inference_latency_async": 51.306843757629395,
"base_inference_throughput_sync": 0.019122000770630986,
"base_inference_throughput_async": 0.019490577216636887,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 41.130767822265625,
"smashed_token_generation_latency_async": 40.58046396821737,
"smashed_token_generation_throughput_sync": 0.024312699542133578,
"smashed_token_generation_throughput_async": 0.024642399376783867,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 51.22703399658203,
"smashed_inference_latency_async": 39.65771198272705,
"smashed_inference_throughput_sync": 0.01952094279100215,
"smashed_inference_throughput_async": 0.025215776453153697,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}