# Dataset Viewer

Nine benchmark runs, all on the task `text_generation`, covering nine models from seven organizations on two hardware types. Each row carries three dicts (`prefill`, `decode`, and `preprocess`), each holding an `efficiency` entry (`unit`, `value`) and an `energy` breakdown (`cpu`, `gpu`, `ram`, `total`, all in kWh). The runs and the per-stage measurements are broken out into the tables below.

## Runs

| org | model | hardware | date |
|---|---|---|---|
| NousResearch | Hermes-3-Llama-3.1-8B | a100-large | 2024-10-30-23-07-10 |
| meta-llama | Llama-3.1-8B-Instruct | a100-large | 2024-10-30-19-38-09 |
| EleutherAI | pythia-1.4b | a10g-large | 2024-10-25-14-19-11 |
| microsoft | phi-2 | a10g-large | 2024-10-25-00-12-06 |
| allenai | OLMo-1B-hf | a10g-large | 2024-10-24-18-23-56 |
| openai-community | gpt2-large | a10g-large | 2024-10-25-15-05-20 |
| HuggingFaceTB | SmolLM-135M | a10g-large | 2024-10-23-19-09-15 |
| HuggingFaceTB | SmolLM-1.7B | a10g-large | 2024-10-24-15-16-02 |
| HuggingFaceTB | SmolLM-360M | a10g-large | 2024-10-24-14-22-22 |

## Prefill (efficiency in tokens/kWh; energy in kWh)

| model | efficiency | cpu | gpu | ram | total |
|---|---|---|---|---|---|
| Hermes-3-Llama-3.1-8B | 7896383.83303204 | 0.010771365165906027 | 0.03499995191660332 | 0.000042438637758223256 | 0.04581375572026757 |
| Llama-3.1-8B-Instruct | 7875937.087230494 | 0.010796036717298752 | 0.035094443769978056 | 0.00004221247271874213 | 0.04593269295999555 |
| pythia-1.4b | 54946841.77704233 | 0.0006950257205501962 | 0.004783902188229749 | 0.000007229079517594969 | 0.00548615698829754 |
| phi-2 | 25532119.335130304 | 0.001485621188480941 | 0.010244187250898752 | 0.000014692518964651205 | 0.011744500958344343 |
| OLMo-1B-hf | 62297846.58810162 | 0.0006078373521613038 | 0.004224683213077207 | 0.0000062823229751878135 | 0.0048388028882136985 |
| gpt2-large | 77907728.2032169 | 0.00042952262899040045 | 0.0027924934839930414 | 0.000004223514289139063 | 0.0032262396272725808 |
| SmolLM-135M | 218923397.1364946 | 0.00028216661832921095 | 0.0011204146741087939 | 0.000002878352463094162 | 0.001405459644901099 |
| SmolLM-1.7B | 41960978.223952904 | 0.0009210179793032473 | 0.006402295760721532 | 0.000009403775706541059 | 0.007332717515731321 |
| SmolLM-360M | 124481811.63546507 | 0.00038417323045180534 | 0.0020836598613710013 | 0.000003917567631278797 | 0.002471750659454085 |

## Decode (efficiency in tokens/kWh; energy in kWh)

| model | efficiency | cpu | gpu | ram | total |
|---|---|---|---|---|---|
| Hermes-3-Llama-3.1-8B | 383715.9018998692 | 0.006909195393007252 | 0.016518416020279855 | 0.000027241396927033754 | 0.023454852810214137 |
| Llama-3.1-8B-Instruct | 373534.5783406723 | 0.006907515534659228 | 0.01715961075545351 | 0.000027028633857338117 | 0.024094154923970088 |
| pythia-1.4b | 1185499.0738062232 | 0.001532609044249083 | 0.006043184112321255 | 0.00001594622041976216 | 0.0075917393769901025 |
| phi-2 | 653974.0928690574 | 0.002664958041045172 | 0.011070685912097256 | 0.000026369096206358968 | 0.013762013049348782 |
| OLMo-1B-hf | 1390874.1413723528 | 0.0012634316170816028 | 0.005194258099847594 | 0.000013061109683948865 | 0.006470750826613145 |
| gpt2-large | 1343266.704857417 | 0.0016164460666061049 | 0.005067730581958951 | 0.00001590753237599822 | 0.006700084180941058 |
| SmolLM-135M | 1478710.1205686298 | 0.0020209151121617142 | 0.004044847485875458 | 0.000020623012510417966 | 0.00608638561054759 |
| SmolLM-1.7B | 986792.3688060087 | 0.0017958659208215804 | 0.007306251844996492 | 0.000018341901791392123 | 0.009120459667609458 |
| SmolLM-360M | 1315130.308155782 | 0.0021575579513180274 | 0.004663859981084995 | 0.00002201039115281833 | 0.0068434283235558405 |

## Preprocess (efficiency in samples/kWh; energy in kWh)

| model | efficiency | cpu | gpu | ram | total |
|---|---|---|---|---|---|
| Hermes-3-Llama-3.1-8B | 19314595.743536726 | 0.00003136402598271767 | 0.000020315571802598242 | 9.47190721638247e-8 | 0.000051774316857479744 |
| Llama-3.1-8B-Instruct | 18903508.14097379 | 0.000031577332566181814 | 0.000021227516981525696 | 9.538423654320006e-8 | 0.000052900233784250705 |
| pythia-1.4b | 33069647.189926323 | 0.000011265942924405358 | 0.00001888834844376852 | 8.491845032760329e-8 | 0.00003023920981850148 |
| phi-2 | 2594586258.0208464 | 3.829965067173665e-7 | 0 | 2.4213983180210174e-9 | 3.8541790503538754e-7 |
| OLMo-1B-hf | 35562851.946256354 | 0.00001108087887082042 | 0.000016956680231938748 | 8.167052233473494e-8 | 0.000028119229625093902 |
| gpt2-large | 34837547.66517286 | 0.000010432217698123875 | 0.000018202236783837478 | 7.020663859453463e-8 | 0.00002870466112055589 |
| SmolLM-135M | 35187752.05342098 | 0.000010718486329682895 | 0.000017620569652265772 | 7.992339706452299e-8 | 0.00002841897937901319 |
| SmolLM-1.7B | 37376068.58504866 | 0.000010438337355784218 | 0.000016238901880072376 | 7.78486547950319e-8 | 0.000026755087890651626 |
| SmolLM-360M | 36616831.97480331 | 0.000010503983233461947 | 0.000016727513381886716 | 7.834823109019483e-8 | 0.00002730984484643886 |
# Analysis of energy usage for HUGS models

Based on the `energy_star` branch of [optimum-benchmark](https://github.com/huggingface/optimum-benchmark), using [codecarbon](https://github.com/mlco2/codecarbon) to estimate energy consumption.
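To give a sense of where the numbers come from, here is a minimal sketch of wrapping a generation call in a codecarbon tracker. The model and prompt are illustrative placeholders, not the benchmark's actual configuration, and optimum-benchmark wires the tracking up internally rather than like this.

```python
# Minimal sketch of codecarbon-style energy measurement around generation.
# Model and prompt are illustrative placeholders only.
from codecarbon import EmissionsTracker
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "HuggingFaceTB/SmolLM-135M"  # one of the benchmarked models
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)
inputs = tokenizer("Hello, world", return_tensors="pt")

tracker = EmissionsTracker(measure_power_secs=1)
tracker.start()
model.generate(**inputs, max_new_tokens=64)
tracker.stop()

# codecarbon reports CPU/GPU/RAM energy in kWh, which is where the
# cpu/gpu/ram/total fields in this dataset come from.
data = tracker.final_emissions_data
print(data.cpu_energy, data.gpu_energy, data.ram_energy, data.energy_consumed)
```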
## Fields

- `task`: Task the model was benchmarked on.
- `org`: Organization hosting the model.
- `model`: The specific model. Model names on the Hugging Face Hub are usually constructed as `{org}/{model}`.
- `hardware`: The hardware instance the benchmark ran on (`a100-large` or `a10g-large`).
- `date`: The date and time that the benchmark was run.
- `prefill`: The estimated energy and efficiency for prefilling. (Note: in the raw data, this dict's efficiency key is spelled `efficency`.)
- `decode`: The estimated energy and efficiency for decoding.
- `preprocess`: The estimated energy and efficiency for preprocessing.
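To make the schema above concrete, here is a small sketch of loading the rows and summing the per-stage energy totals. The repo ID is a placeholder, since this card does not spell it out; the field access follows the schema above.

```python
# Sketch: load the benchmark rows and sum per-stage energy totals.
from datasets import load_dataset

# Placeholder repo ID; substitute the actual ID of this dataset.
ds = load_dataset("AIEnergyScore/hugs-benchmark-results", split="train")

for row in ds:
    total_kwh = (
        row["preprocess"]["energy"]["total"]
        + row["prefill"]["energy"]["total"]
        + row["decode"]["energy"]["total"]
    )
    print(f'{row["org"]}/{row["model"]} on {row["hardware"]}: {total_kwh:.5f} kWh')
```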
## Code to Reproduce

As I'm devving, I'm hopping between https://huggingface.co/spaces/AIEnergyScore/benchmark-hugs-models and https://huggingface.co/spaces/meg/CalculateCarbon to run the benchmarks. From there, `python code/make_pretty_dataset.py` (included in this repository) takes the raw results and uploads them to this dataset.
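The script itself lives in the repository; as a rough, hypothetical sketch of the reshape-and-upload step it performs (the raw-results layout and field names here are assumptions, only the `datasets` calls are standard API):

```python
# Hypothetical sketch of a raw-results -> Hub upload step; the file layout
# and raw field names are assumptions, not the actual script.
import json
from pathlib import Path

from datasets import Dataset

rows = []
for path in Path("raw_results").glob("*.json"):  # assumed output layout
    run = json.loads(path.read_text())
    rows.append(
        {
            "task": run["task"],
            "org": run["org"],
            "model": run["model"],
            "hardware": run["hardware"],
            "date": run["date"],
            "prefill": run["prefill"],
            "decode": run["decode"],
            "preprocess": run["preprocess"],
        }
    )

Dataset.from_list(rows).push_to_hub("org/dataset-id")  # placeholder repo ID
```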