{
"_name_or_path": "distributed/optimized-gpt2-2b",
"activation_function": "gelu_new",
"all_reduce_scores": {
"0": "SUCCESS",
"1": "SUCCESS",
"10": "SUCCESS",
"100": "SUCCESS",
"101": "SUCCESS",
"102": "SUCCESS",
"103": "SUCCESS",
"104": "SUCCESS",
"105": "SUCCESS",
"106": "SUCCESS",
"107": "SUCCESS",
"108": "SUCCESS",
"109": "SUCCESS",
"11": "SUCCESS",
"110": "SUCCESS",
"111": "SUCCESS",
"112": "SUCCESS",
"113": "SUCCESS",
"114": "SUCCESS",
"115": "SUCCESS",
"116": "SUCCESS",
"117": "SUCCESS",
"118": "SUCCESS",
"119": "SUCCESS",
"12": "SUCCESS",
"120": "SUCCESS",
"121": "SUCCESS",
"122": "SUCCESS",
"123": "SUCCESS",
"124": "SUCCESS",
"125": "SUCCESS",
"126": "SUCCESS",
"127": "SUCCESS",
"128": "SUCCESS",
"129": "SUCCESS",
"13": "SUCCESS",
"130": "SUCCESS",
"131": "SUCCESS",
"132": "SUCCESS",
"133": "SUCCESS",
"134": "SUCCESS",
"135": "SUCCESS",
"136": "SUCCESS",
"137": "SUCCESS",
"138": "SUCCESS",
"139": "SUCCESS",
"14": "SUCCESS",
"140": "SUCCESS",
"141": "SUCCESS",
"142": "SUCCESS",
"143": "SUCCESS",
"144": "SUCCESS",
"145": "SUCCESS",
"146": "SUCCESS",
"147": "SUCCESS",
"148": "SUCCESS",
"149": "SUCCESS",
"15": "SUCCESS",
"150": "SUCCESS",
"151": "SUCCESS",
"152": "SUCCESS",
"153": "SUCCESS",
"154": "SUCCESS",
"155": "SUCCESS",
"156": "SUCCESS",
"157": "SUCCESS",
"158": "SUCCESS",
"159": "SUCCESS",
"16": "SUCCESS",
"160": "SUCCESS",
"161": "SUCCESS",
"162": "SUCCESS",
"163": "SUCCESS",
"164": "SUCCESS",
"165": "SUCCESS",
"166": "SUCCESS",
"167": "SUCCESS",
"168": "SUCCESS",
"169": "SUCCESS",
"17": "SUCCESS",
"170": "SUCCESS",
"171": "SUCCESS",
"172": "SUCCESS",
"173": "SUCCESS",
"174": "SUCCESS",
"175": "SUCCESS",
"176": "SUCCESS",
"177": "SUCCESS",
"178": "SUCCESS",
"179": "SUCCESS",
"18": "SUCCESS",
"180": "SUCCESS",
"181": "SUCCESS",
"182": "SUCCESS",
"183": "SUCCESS",
"184": "SUCCESS",
"185": "SUCCESS",
"186": "SUCCESS",
"187": "SUCCESS",
"188": "SUCCESS",
"189": "SUCCESS",
"19": "SUCCESS",
"190": "SUCCESS",
"191": "SUCCESS",
"192": "SUCCESS",
"193": "SUCCESS",
"194": "SUCCESS",
"195": "SUCCESS",
"196": "SUCCESS",
"197": "SUCCESS",
"198": "SUCCESS",
"199": "SUCCESS",
"2": "SUCCESS",
"20": "SUCCESS",
"200": "SUCCESS",
"201": "SUCCESS",
"202": "SUCCESS",
"203": "SUCCESS",
"204": "SUCCESS",
"205": "SUCCESS",
"206": "SUCCESS",
"207": "SUCCESS",
"208": "SUCCESS",
"209": "SUCCESS",
"21": "SUCCESS",
"210": "SUCCESS",
"211": "SUCCESS",
"212": "SUCCESS",
"213": "SUCCESS",
"214": "SUCCESS",
"215": "SUCCESS",
"216": "SUCCESS",
"217": "SUCCESS",
"218": "SUCCESS",
"219": "SUCCESS",
"22": "SUCCESS",
"220": "SUCCESS",
"221": "SUCCESS",
"222": "SUCCESS",
"223": "SUCCESS",
"224": "SUCCESS",
"225": "SUCCESS",
"226": "SUCCESS",
"227": "SUCCESS",
"228": "SUCCESS",
"229": "SUCCESS",
"23": "SUCCESS",
"230": "SUCCESS",
"231": "SUCCESS",
"232": "SUCCESS",
"233": "SUCCESS",
"234": "SUCCESS",
"235": "SUCCESS",
"236": "SUCCESS",
"237": "SUCCESS",
"238": "SUCCESS",
"239": "SUCCESS",
"24": "SUCCESS",
"240": "SUCCESS",
"241": "SUCCESS",
"242": "SUCCESS",
"243": "SUCCESS",
"244": "SUCCESS",
"245": "SUCCESS",
"246": "SUCCESS",
"247": "SUCCESS",
"248": "SUCCESS",
"249": "SUCCESS",
"25": "SUCCESS",
"250": "SUCCESS",
"251": "SUCCESS",
"252": "SUCCESS",
"253": "SUCCESS",
"254": "SUCCESS",
"255": "SUCCESS",
"26": "SUCCESS",
"27": "SUCCESS",
"28": "SUCCESS",
"29": "SUCCESS",
"3": "SUCCESS",
"30": "SUCCESS",
"31": "SUCCESS",
"32": "SUCCESS",
"33": "SUCCESS",
"34": "SUCCESS",
"35": "SUCCESS",
"36": "SUCCESS",
"37": "SUCCESS",
"38": "SUCCESS",
"39": "SUCCESS",
"4": "SUCCESS",
"40": "SUCCESS",
"41": "SUCCESS",
"42": "SUCCESS",
"43": "SUCCESS",
"44": "SUCCESS",
"45": "SUCCESS",
"46": "SUCCESS",
"47": "SUCCESS",
"48": "SUCCESS",
"49": "SUCCESS",
"5": "SUCCESS",
"50": "SUCCESS",
"51": "SUCCESS",
"52": "SUCCESS",
"53": "SUCCESS",
"54": "SUCCESS",
"55": "SUCCESS",
"56": "SUCCESS",
"57": "SUCCESS",
"58": "SUCCESS",
"59": "SUCCESS",
"6": "SUCCESS",
"60": "SUCCESS",
"61": "SUCCESS",
"62": "SUCCESS",
"63": "SUCCESS",
"64": "SUCCESS",
"65": "SUCCESS",
"66": "SUCCESS",
"67": "SUCCESS",
"68": "SUCCESS",
"69": "SUCCESS",
"7": "SUCCESS",
"70": "SUCCESS",
"71": "SUCCESS",
"72": "SUCCESS",
"73": "SUCCESS",
"74": "SUCCESS",
"75": "SUCCESS",
"76": "SUCCESS",
"77": "SUCCESS",
"78": "SUCCESS",
"79": "SUCCESS",
"8": "SUCCESS",
"80": "SUCCESS",
"81": "SUCCESS",
"82": "SUCCESS",
"83": "SUCCESS",
"84": "SUCCESS",
"85": "SUCCESS",
"86": "SUCCESS",
"87": "SUCCESS",
"88": "SUCCESS",
"89": "SUCCESS",
"9": "SUCCESS",
"90": "SUCCESS",
"91": "SUCCESS",
"92": "SUCCESS",
"93": "SUCCESS",
"94": "SUCCESS",
"95": "SUCCESS",
"96": "SUCCESS",
"97": "SUCCESS",
"98": "SUCCESS",
"99": "SUCCESS"
},
"architectures": [
"GPTOptim"
],
"attn_pdrop": 0.1,
"auto_map": {
"AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
"AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
},
"block_size": 1024,
"bos_token_id": 50256,
"embd_pdrop": 0.1,
"eos_token_id": 50256,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-05,
"model_type": "gpt_optimized",
"n_embd": 1904,
"n_head": 34,
"n_inner": null,
"n_layer": 44,
"n_positions": 1024,
"reorder_and_upcast_attn": false,
"resid_pdrop": 0.1,
"scale_attn_by_inverse_layer_idx": false,
"scale_attn_weights": true,
"summary_activation": null,
"summary_first_dropout": 0.1,
"summary_proj_to_labels": true,
"summary_type": "cls_index",
"summary_use_proj": true,
"torch_dtype": "float32",
"transformers_version": "4.39.3",
"use_cache": true,
"vocab_size": 50257
}