public-leaderboard-text / submissions / laureBe_20250129_174113.json
{"username": "laureBe", "space_url": "https://huggingface.co/spaces/laureBe/baseline", "submission_timestamp": "2025-01-29T16:41:06.917679", "model_description": "Attention GRU classification", "accuracy": 0.27794871794871795, "energy_consumed_wh": 8.26773855088484, "emissions_gco2eq": 3.0519065265441627, "emissions_data": {"run_id": "b0d50514-4137-426a-9af8-2553aff40746", "duration": 151.52643818000797, "emissions": 0.003051906526544163, "emissions_rate": 2.0141088908068867e-05, "cpu_power": 150, "gpu_power": 0, "ram_power": 46.42727851867676, "cpu_energy": 0.0063135931382081855, "gpu_energy": 0, "ram_energy": 0.0019541454126766524, "energy_consumed": 0.00826773855088484, "country_name": "United States", "country_iso_code": "USA", "region": "virginia", "cloud_provider": "", "cloud_region": "", "os": "Linux-5.10.230-223.885.amzn2.x86_64-x86_64-with-glibc2.36", "python_version": "3.9.21", "codecarbon_version": "2.8.2", "cpu_count": 16, "cpu_model": "Intel(R) Xeon(R) Platinum 8375C CPU @ 2.90GHz", "gpu_count": null, "gpu_model": null, "ram_total_size": 123.80607604980469, "tracking_mode": "machine", "on_cloud": "N", "pue": 1}, "api_route": "/text", "dataset_config": {"dataset_name": "QuotaClimat/frugalaichallenge-text-train", "test_size": 0.2, "test_seed": 42}}